mirror of
https://github.com/peeringdb/peeringdb.git
synced 2024-05-11 05:55:09 +00:00
initial commit of code
This commit is contained in:
11
.gitignore
vendored
11
.gitignore
vendored
@@ -1 +1,10 @@
|
|||||||
*.mo
|
.cache/
|
||||||
|
.coverage
|
||||||
|
.facsimile
|
||||||
|
*.pyc
|
||||||
|
*.log
|
||||||
|
*.sql
|
||||||
|
*.swp
|
||||||
|
OLD.*
|
||||||
|
genstatic/
|
||||||
|
venv
|
||||||
|
22
LICENSE
Normal file
22
LICENSE
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
Copyright 2004-2018 PeeringDB
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions
|
||||||
|
are met:
|
||||||
|
|
||||||
|
1. Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
2. Redistributions in binary form must reproduce the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer in the
|
||||||
|
documentation and/or other materials provided with the distribution.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||||
|
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||||
|
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||||
|
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||||
|
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||||
|
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||||
|
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
1
config/VERSION
Normal file
1
config/VERSION
Normal file
@@ -0,0 +1 @@
|
|||||||
|
2.11.0.1
|
56
config/facsimile/dev.yaml
Normal file
56
config/facsimile/dev.yaml
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
|
||||||
|
home: {{environ.HOME}}/srv/dev.peeringdb.com
|
||||||
|
|
||||||
|
contact:
|
||||||
|
email: {{environ.USER}}@localhost
|
||||||
|
from: {{environ.USER}}@localhost
|
||||||
|
notify: {{environ.USER}}@localhost
|
||||||
|
sponsorship: {{environ.USER}}@localhost
|
||||||
|
|
||||||
|
rc:
|
||||||
|
db:
|
||||||
|
default:
|
||||||
|
host: localhost
|
||||||
|
name: {{environ.USER}}_peeringdb
|
||||||
|
prefix: {{environ.USER}}_
|
||||||
|
#read:
|
||||||
|
# host: localhost
|
||||||
|
# name: {{environ.USER}}_peeringdb
|
||||||
|
# prefix: {{environ.USER}}_
|
||||||
|
ssl:
|
||||||
|
crt:
|
||||||
|
key:
|
||||||
|
api:
|
||||||
|
url: http://localhost/api
|
||||||
|
depth_result_limit: 250
|
||||||
|
cache:
|
||||||
|
enabled: true
|
||||||
|
dir: {{environ.HOME}}/srv/dev.peeringdb.com/etc/api-cache
|
||||||
|
log: {{environ.HOME}}/srv/dev.peeringdb.com/var/log/api-cache.log
|
||||||
|
|
||||||
|
recaptcha:
|
||||||
|
public_key: 6LdTFzkUAAAAAJpkAfSUOue7iNDcVY3H2xpEUVON
|
||||||
|
|
||||||
|
oauth:
|
||||||
|
enabled: true
|
||||||
|
|
||||||
|
misc:
|
||||||
|
base_url : http://localhost
|
||||||
|
session:
|
||||||
|
domain: localhost
|
||||||
|
suggestions:
|
||||||
|
org_id: 18982
|
||||||
|
|
||||||
|
django:
|
||||||
|
settings: peeringdb_com
|
||||||
|
|
||||||
|
deploy:
|
||||||
|
destdir: {{environ.HOME}}/srv/dev.peeringdb.com
|
||||||
|
user: {{environ.USER}}
|
||||||
|
postcmd:
|
||||||
|
- chmod 0755 {{environ.HOME}}/srv/dev.peeringdb.com/peeringdb/in.whoisd
|
||||||
|
|
||||||
|
public:
|
||||||
|
listen: 0.0.0.0
|
||||||
|
fq_name: no
|
||||||
|
|
12
config/facsimile/facsimile.yaml
Normal file
12
config/facsimile/facsimile.yaml
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
|
||||||
|
facsimile:
|
||||||
|
name: peeringdb
|
||||||
|
components:
|
||||||
|
peeringdb:
|
||||||
|
class: Facsimile
|
||||||
|
repo: git@github.com:peeringdb/peeringdb.git
|
||||||
|
|
||||||
|
venv:
|
||||||
|
class: VirtualEnv
|
||||||
|
repo: git@github.com:peeringdb/peeringdb.git
|
||||||
|
|
32
config/facsimile/modules.yaml
Normal file
32
config/facsimile/modules.yaml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
- name: peeringdb
|
||||||
|
genconfig: false
|
||||||
|
write_sql: true
|
||||||
|
db:
|
||||||
|
name: peeringdb
|
||||||
|
selectable: []
|
||||||
|
xbahn:
|
||||||
|
exchange: twentyc.peeringdb
|
||||||
|
|
||||||
|
- name: djangokey
|
||||||
|
genconfig: false
|
||||||
|
write_sql: false
|
||||||
|
|
||||||
|
- name: deskpro
|
||||||
|
genconfig: false
|
||||||
|
write_sql: false
|
||||||
|
|
||||||
|
- name: google_geoloc_api
|
||||||
|
genconfig: false
|
||||||
|
write_sql: false
|
||||||
|
|
||||||
|
- name: lacnic_rdap_apikey
|
||||||
|
genconfig: false
|
||||||
|
write_sql: false
|
||||||
|
|
||||||
|
- name: recaptcha
|
||||||
|
genconfig: false
|
||||||
|
write_sql: false
|
||||||
|
|
||||||
|
- name: email
|
||||||
|
genconfig: false
|
||||||
|
write_sql: false
|
224
config/facsimile/peeringdb.yaml
Normal file
224
config/facsimile/peeringdb.yaml
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
|
||||||
|
rc:
|
||||||
|
base:
|
||||||
|
port: 7003
|
||||||
|
admport: 7002
|
||||||
|
deskpro:
|
||||||
|
url: https://peeringdb.deskpro.com/api/v2/20180226
|
||||||
|
|
||||||
|
misc:
|
||||||
|
# base_url:
|
||||||
|
suggestions:
|
||||||
|
org_id: 0
|
||||||
|
session:
|
||||||
|
domain: peeringdb.com
|
||||||
|
ratelimits:
|
||||||
|
request_login_POST: "4/m"
|
||||||
|
view_request_ownership_POST: "3/m"
|
||||||
|
view_request_ownership_GET: "3/m"
|
||||||
|
view_affiliate_to_org_POST: "3/m"
|
||||||
|
resend_confirmation_mail: "2/m"
|
||||||
|
view_verify_POST: "2/m"
|
||||||
|
view_username_retrieve_initiate: "2/m"
|
||||||
|
request_translation: "2/m"
|
||||||
|
api:
|
||||||
|
throtteling:
|
||||||
|
enabled: true
|
||||||
|
anon: "100/second"
|
||||||
|
user: "100/second"
|
||||||
|
|
||||||
|
|
||||||
|
locale:
|
||||||
|
# enable these locale
|
||||||
|
- en
|
||||||
|
- pt
|
||||||
|
- it
|
||||||
|
- cs_CZ
|
||||||
|
- da_DK
|
||||||
|
- de_DE
|
||||||
|
- fr_FR
|
||||||
|
- id_ID
|
||||||
|
- ja_JP
|
||||||
|
- ru_RU
|
||||||
|
- te_IN
|
||||||
|
- zh_CN
|
||||||
|
|
||||||
|
|
||||||
|
data_quality:
|
||||||
|
# maximum value to allow in network.info_prefixes4
|
||||||
|
max_prefix_v4_limit: 500000
|
||||||
|
# maximum value to allow in network.info_prefixes6
|
||||||
|
max_prefix_v6_limit: 50000
|
||||||
|
|
||||||
|
install:
|
||||||
|
groups:
|
||||||
|
|
||||||
|
- type: tmpl
|
||||||
|
dir: tmpl
|
||||||
|
render_files: true
|
||||||
|
skip: ^\.
|
||||||
|
|
||||||
|
# peeringdb static files
|
||||||
|
- type: copy
|
||||||
|
dir: $SRC_DIR$/peeringdb_server/static
|
||||||
|
pattern: ^$SRC_DIR$
|
||||||
|
replace: peeringdb
|
||||||
|
skip: ^\.
|
||||||
|
|
||||||
|
- type: copy
|
||||||
|
pattern: ^$SRC_DIR$
|
||||||
|
replace: peeringdb
|
||||||
|
files:
|
||||||
|
- $SRC_DIR$/manage.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/api_cache.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/__init__.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/admin.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/models.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/views.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/data_views.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/org_admin_views.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/autocomplete_views.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/apps.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/signals.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/inet.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/mail.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/urls.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/search.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/stats.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/deskpro.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/serializers.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/admin_commandline_tools.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/rest.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/renderers.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/settings.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/forms.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/export_views.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/validators.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/db_router.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/mock.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/__init__.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/__init__.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_deskpro_publish.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/_db_command.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_api_cache.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_api_test.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_fac_merge.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_fac_merge_undo.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_renumber_lans.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_undelete.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_reversion_inspect.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_status.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_whois.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_sponsorship_notify.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_batch_replace.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_ixp_merge.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_geosync.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_ixf_ixp_member_import.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_stats.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_deskpro_requeue.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/management/commands/pdb_generate_test_data.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/__init__.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0001_initial.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0002_partnernship_model.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0003_add_lat_lon_to_address_models.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0004_geocode_fields.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0005_lat_lon_field_rename.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0006_network_allow_ixp_update.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0007_ixlan_json_member_list_url.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0008_ixf_import_log.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0009_rename_json_member_list_field.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0010_rename_ixf_ixp_member_list_url.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0011_commandline_tool.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0012_deskpro.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0013_user_locale.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0014_clt_description.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/migrations/0015_email_address.py
|
||||||
|
- $SRC_DIR$/fixtures/initial_data.json
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/admin/admin_extended.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/admin/user-organizations.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/request-ownership.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/admin/org_merge_tool.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/admin/peeringdb_server/commandlinetool/change_list.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/admin/peeringdb_server/commandlinetool/preview_command.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/admin/peeringdb_server/commandlinetool/prepare_command.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/admin/peeringdb_server/commandlinetool/run_command.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/verify.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/register.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/oauth-login.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/account/email_confirm.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/account/login.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/index.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/header.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/footer.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_header.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/error.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/error_404.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/error_403.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_network_side.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_network_bottom.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_exchange_side.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_exchange_bottom.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_facility_side.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_facility_bottom.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_network_assets.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_facility_assets.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_exchange_assets.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_organization_side.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_organization_bottom.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_organization_assets.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_exchange_tools.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_organization_tools.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/entity_create.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_network_tools.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_facility_tools.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/login.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/inline_search.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/inline_search_hidden.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/verification_banner.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/header-sponsorships.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/search_result_frame.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/search_result.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/base.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/advanced-search.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/password-reset.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/header-partnerships.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/partnerships.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/username-retrieve-complete.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/username-retrieve.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/entity_suggest.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_suggest_fac.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_suggest_ix.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/view_suggest_net.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/username-retrieve.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/password-reset.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-org-admin-user-affil.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-pdb-admin-user-affil.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-org-admin-user-affil-denied.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-org-admin-user-affil-approved.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-sponsorship-admin-expiration.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-pdb-admin-vq.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-pdb-admin-asnauto-skipvq.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-pdb-admin-asnauto-entity-creation.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-pdb-admin-asnauto-affil.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-org-admin-merge.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-user-uoar-ownership-approved.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/email/notify-pdb-admin-rdap-error.txt
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/rest_framework_swagger/base.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templatetags/__init__.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/templatetags/util.py
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/advanced-search-net.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/advanced-search-fac.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/advanced-search-ix.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/sponsorships.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/simple_content.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/site/aup.html
|
||||||
|
- $SRC_DIR$/peeringdb_server/templates/oauth2_provider/base.html
|
||||||
|
|
||||||
|
# peeringdb locale files
|
||||||
|
- type: copy
|
||||||
|
dir: $SRC_DIR$/locale
|
||||||
|
pattern: ^$SRC_DIR$
|
||||||
|
replace: peeringdb
|
||||||
|
skip: ^\.
|
||||||
|
|
30
config/facsimile/tmpl/_ALL_/_DEPLOY_/etc/django-uwsgi.ini
Normal file
30
config/facsimile/tmpl/_ALL_/_DEPLOY_/etc/django-uwsgi.ini
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
|
||||||
|
[uwsgi]
|
||||||
|
# project base dir
|
||||||
|
chdir={{env.home}}/peeringdb
|
||||||
|
# wsgi app to run
|
||||||
|
module={{env.django.settings}}.wsgi
|
||||||
|
# virtualenv
|
||||||
|
home={{env.home}}/venv
|
||||||
|
|
||||||
|
# TODO instance ID + port base
|
||||||
|
socket=127.0.0.1:{{env.rc.base.admport}}
|
||||||
|
logger=syslog:uwsgi.pdb,local0
|
||||||
|
|
||||||
|
# headers only
|
||||||
|
|
||||||
|
# see nginx.conf's "large_client_header_buffers" for details
|
||||||
|
# (uwsgi protocol max is 65535)
|
||||||
|
buffer-size=65535
|
||||||
|
|
||||||
|
#enable-threads=true
|
||||||
|
#threads=5
|
||||||
|
|
||||||
|
master=true
|
||||||
|
processes=20
|
||||||
|
max-requests=1000
|
||||||
|
# terminate request after 60 seconds
|
||||||
|
harakiri=60
|
||||||
|
|
||||||
|
# systemd conformance
|
||||||
|
die-on-term = true
|
89
config/facsimile/tmpl/_ALL_/_DEPLOY_/etc/mime.types
Normal file
89
config/facsimile/tmpl/_ALL_/_DEPLOY_/etc/mime.types
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
|
||||||
|
types {
|
||||||
|
text/html html htm shtml;
|
||||||
|
text/css css;
|
||||||
|
text/xml xml;
|
||||||
|
image/gif gif;
|
||||||
|
image/jpeg jpeg jpg;
|
||||||
|
application/javascript js;
|
||||||
|
application/atom+xml atom;
|
||||||
|
application/rss+xml rss;
|
||||||
|
|
||||||
|
text/mathml mml;
|
||||||
|
text/plain txt;
|
||||||
|
text/vnd.sun.j2me.app-descriptor jad;
|
||||||
|
text/vnd.wap.wml wml;
|
||||||
|
text/x-component htc;
|
||||||
|
|
||||||
|
image/png png;
|
||||||
|
image/tiff tif tiff;
|
||||||
|
image/vnd.wap.wbmp wbmp;
|
||||||
|
image/x-icon ico;
|
||||||
|
image/x-jng jng;
|
||||||
|
image/x-ms-bmp bmp;
|
||||||
|
image/svg+xml svg svgz;
|
||||||
|
image/webp webp;
|
||||||
|
|
||||||
|
application/font-woff woff;
|
||||||
|
application/java-archive jar war ear;
|
||||||
|
application/json json;
|
||||||
|
application/mac-binhex40 hqx;
|
||||||
|
application/msword doc;
|
||||||
|
application/pdf pdf;
|
||||||
|
application/postscript ps eps ai;
|
||||||
|
application/rtf rtf;
|
||||||
|
application/vnd.apple.mpegurl m3u8;
|
||||||
|
application/vnd.ms-excel xls;
|
||||||
|
application/vnd.ms-fontobject eot;
|
||||||
|
application/vnd.ms-powerpoint ppt;
|
||||||
|
application/vnd.wap.wmlc wmlc;
|
||||||
|
application/vnd.google-earth.kml+xml kml;
|
||||||
|
application/vnd.google-earth.kmz kmz;
|
||||||
|
application/x-7z-compressed 7z;
|
||||||
|
application/x-cocoa cco;
|
||||||
|
application/x-java-archive-diff jardiff;
|
||||||
|
application/x-java-jnlp-file jnlp;
|
||||||
|
application/x-makeself run;
|
||||||
|
application/x-perl pl pm;
|
||||||
|
application/x-pilot prc pdb;
|
||||||
|
application/x-rar-compressed rar;
|
||||||
|
application/x-redhat-package-manager rpm;
|
||||||
|
application/x-sea sea;
|
||||||
|
application/x-shockwave-flash swf;
|
||||||
|
application/x-stuffit sit;
|
||||||
|
application/x-tcl tcl tk;
|
||||||
|
application/x-x509-ca-cert der pem crt;
|
||||||
|
application/x-xpinstall xpi;
|
||||||
|
application/xhtml+xml xhtml;
|
||||||
|
application/xspf+xml xspf;
|
||||||
|
application/zip zip;
|
||||||
|
|
||||||
|
application/octet-stream bin exe dll;
|
||||||
|
application/octet-stream deb;
|
||||||
|
application/octet-stream dmg;
|
||||||
|
application/octet-stream iso img;
|
||||||
|
application/octet-stream msi msp msm;
|
||||||
|
|
||||||
|
application/vnd.openxmlformats-officedocument.wordprocessingml.document docx;
|
||||||
|
application/vnd.openxmlformats-officedocument.spreadsheetml.sheet xlsx;
|
||||||
|
application/vnd.openxmlformats-officedocument.presentationml.presentation pptx;
|
||||||
|
|
||||||
|
audio/midi mid midi kar;
|
||||||
|
audio/mpeg mp3;
|
||||||
|
audio/ogg ogg;
|
||||||
|
audio/x-m4a m4a;
|
||||||
|
audio/x-realaudio ra;
|
||||||
|
|
||||||
|
video/3gpp 3gpp 3gp;
|
||||||
|
video/mp2t ts;
|
||||||
|
video/mp4 mp4;
|
||||||
|
video/mpeg mpeg mpg;
|
||||||
|
video/quicktime mov;
|
||||||
|
video/webm webm;
|
||||||
|
video/x-flv flv;
|
||||||
|
video/x-m4v m4v;
|
||||||
|
video/x-mng mng;
|
||||||
|
video/x-ms-asf asx asf;
|
||||||
|
video/x-ms-wmv wmv;
|
||||||
|
video/x-msvideo avi;
|
||||||
|
}
|
2
config/facsimile/tmpl/_ALL_/_DEPLOY_/etc/my.cnf
Normal file
2
config/facsimile/tmpl/_ALL_/_DEPLOY_/etc/my.cnf
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
[client]
|
||||||
|
password={{module.peeringdb.password}}
|
15
config/facsimile/tmpl/_ALL_/_DEPLOY_/etc/uwsgi_params
Normal file
15
config/facsimile/tmpl/_ALL_/_DEPLOY_/etc/uwsgi_params
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
|
||||||
|
uwsgi_param QUERY_STRING $query_string;
|
||||||
|
uwsgi_param REQUEST_METHOD $request_method;
|
||||||
|
uwsgi_param CONTENT_TYPE $content_type;
|
||||||
|
uwsgi_param CONTENT_LENGTH $content_length;
|
||||||
|
|
||||||
|
uwsgi_param REQUEST_URI $request_uri;
|
||||||
|
uwsgi_param PATH_INFO $document_uri;
|
||||||
|
uwsgi_param DOCUMENT_ROOT $document_root;
|
||||||
|
uwsgi_param SERVER_PROTOCOL $server_protocol;
|
||||||
|
|
||||||
|
uwsgi_param REMOTE_ADDR $remote_addr;
|
||||||
|
uwsgi_param REMOTE_PORT $remote_port;
|
||||||
|
uwsgi_param SERVER_PORT $server_port;
|
||||||
|
uwsgi_param SERVER_NAME $server_name;
|
@@ -0,0 +1,15 @@
|
|||||||
|
if [ -d "peeringdb_server" ]; then
|
||||||
|
echo "Moving peeringdb migrations directory temporarily ..."
|
||||||
|
mv peeringdb_server/migrations peeringdb_server/migrations_ignore
|
||||||
|
echo "Fake applying NSP migrations ..."
|
||||||
|
python manage.py migrate django_namespace_perms --fake
|
||||||
|
echo "Applying django migrations ..."
|
||||||
|
python manage.py migrate
|
||||||
|
echo "Restoring peeringdb migrations directory ..."
|
||||||
|
mv peeringdb_server/migrations_ignore peeringdb_server/migrations
|
||||||
|
echo "Fake applying peeringdb_server migrations ..."
|
||||||
|
python manage.py pdb_d111_migrate
|
||||||
|
echo "Done!"
|
||||||
|
else
|
||||||
|
echo "Script needs to be run in peeringdb project directory (same location as peeringdb_server)"
|
||||||
|
fi
|
25
config/facsimile/tmpl/_ALL_/_DEPLOY_/peeringdb/in.whoisd
Executable file
25
config/facsimile/tmpl/_ALL_/_DEPLOY_/peeringdb/in.whoisd
Executable file
@@ -0,0 +1,25 @@
|
|||||||
|
#!{{env.home}}/venv/bin/python
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
#print "path", sys.path
|
||||||
|
|
||||||
|
try:
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "peeringdb_com.settings")
|
||||||
|
|
||||||
|
from django.core.management import execute_from_command_line
|
||||||
|
|
||||||
|
inp = sys.stdin.readline().strip()
|
||||||
|
argv = ['in.whoisd', 'pdb_whois', inp]
|
||||||
|
execute_from_command_line(argv)
|
||||||
|
|
||||||
|
except BaseException as e:
|
||||||
|
# TODO log here - need to inherit
|
||||||
|
# log = logging.getLogger('pdb.script.whois')
|
||||||
|
# log.exception(e)
|
||||||
|
print("an error occurred: {}".format(e))
|
||||||
|
pass
|
@@ -0,0 +1,147 @@
|
|||||||
|
|
||||||
|
ALLOWED_HOSTS = ['*']
|
||||||
|
{% if facs.release_environment == 'dev' or facs.release_environment == 'veny_dev' %}
|
||||||
|
DEBUG = True
|
||||||
|
TEMPLATES[0]["OPTIONS"]["debug"] = DEBUG
|
||||||
|
MAIL_DEBUG = DEBUG
|
||||||
|
#INSTALLED_APPS += (
|
||||||
|
# 'debug_toolbar',
|
||||||
|
# )
|
||||||
|
#DEBUG_TOOLBAR_PATCH_SETTINGS = False
|
||||||
|
{% else %}
|
||||||
|
DEBUG = False
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
RELEASE_ENV = '{{ facs.release_environment }}'
|
||||||
|
|
||||||
|
PACKAGE_VERSION='{{facs.version}}'
|
||||||
|
EMAIL_SUBJECT_PREFIX='[{{facs.release_environment}}] '
|
||||||
|
# from for errors
|
||||||
|
SERVER_EMAIL='{{env.contact.email}}'
|
||||||
|
# from for users
|
||||||
|
DEFAULT_FROM_EMAIL='{{env.contact.email}}'
|
||||||
|
SPONSORSHIPS_EMAIL='{{env.contact.sponsorship}}'
|
||||||
|
ADMINS = (
|
||||||
|
('Support', '{{env.contact.email}}'),
|
||||||
|
)
|
||||||
|
|
||||||
|
{% if env.mail %}
|
||||||
|
EMAIL_HOST = '{{env.mail.host}}'
|
||||||
|
EMAIL_PORT = {{env.mail.port}}
|
||||||
|
EMAIL_HOST_USER = '{{env.mail.user}}'
|
||||||
|
EMAIL_HOST_PASSWORD = '{{module.email.password}}'
|
||||||
|
EMAIL_USE_TLS = {{env.mail.tls}}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
STATIC_ROOT = '{{env.home}}/static'
|
||||||
|
STATIC_URL = '/s/{{facs.version}}/'
|
||||||
|
|
||||||
|
MEDIA_ROOT = '{{env.home}}/media'
|
||||||
|
MEDIA_URL = '/m/{{facs.version}}/'
|
||||||
|
|
||||||
|
SECRET_KEY = '{{module.djangokey.password}}'
|
||||||
|
|
||||||
|
{% if env.misc.session.domain %}
|
||||||
|
SESSION_COOKIE_DOMAIN = '{{ env.misc.session.domain }}'
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
SESSION_COOKIE_SECURE = True
|
||||||
|
|
||||||
|
RECAPTCHA_PUBLIC_KEY = '{{ env.recaptcha.public_key }}'
|
||||||
|
RECAPTCHA_SECRET_KEY = '{{ module.recaptcha.password }}'
|
||||||
|
|
||||||
|
DESKPRO_KEY = '{{ module.deskpro.password }}'
|
||||||
|
DESKPRO_URL = '{{ env.rc.deskpro.url }}'
|
||||||
|
|
||||||
|
API_URL = '{{env.rc.api.url}}'
|
||||||
|
API_DEPTH_ROW_LIMIT = {{env.rc.api.depth_result_limit}}
|
||||||
|
API_CACHE_ROOT = '{{env.rc.api.cache.dir}}'
|
||||||
|
API_CACHE_ENABLED = {% if env.rc.api.cache.enabled %}True{% else %}False{% endif %}
|
||||||
|
|
||||||
|
GOOGLE_GEOLOC_API_KEY = '{{ module.google_geoloc_api.password }}'
|
||||||
|
|
||||||
|
RDAP_LACNIC_APIKEY = '{{ module.lacnic_rdap_apikey.password }}'
|
||||||
|
|
||||||
|
{% if "log" in env.rc.api.cache %}
|
||||||
|
API_CACHE_LOG = "{{ env.rc.api.cache.log }}"
|
||||||
|
{% else %}
|
||||||
|
API_CACHE_LOG = os.path.join(API_CACHE_ROOT,'log.log')
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
DATABASES = {
|
||||||
|
{% for db_name, db in env.rc.db.items() %}
|
||||||
|
'{{db_name}}': {
|
||||||
|
'ENGINE': 'django.db.backends.{{env.rc.db.engine | default('mysql')}}',
|
||||||
|
'HOST': '{{db.host}}',
|
||||||
|
'PORT': '{{db.port}}',
|
||||||
|
'NAME': '{{db.prefix}}{{module.peeringdb.db.name}}',
|
||||||
|
'USER': '{{db.prefix}}{{module.peeringdb.name}}',
|
||||||
|
'PASSWORD': '{{module.peeringdb.password}}',
|
||||||
|
},
|
||||||
|
{% endfor %}
|
||||||
|
}
|
||||||
|
|
||||||
|
{% if 'read' in env.rc.db %}
|
||||||
|
DATABASE_ROUTERS = ["peeringdb_server.db_router.DatabaseRouter"]
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
CONN_MAX_AGE = 3600
|
||||||
|
|
||||||
|
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FWD_PROTO', 'https')
|
||||||
|
|
||||||
|
OAUTH_ENABLED = {% if env.oauth.enabled %}True{% else %}False{% endif %}
|
||||||
|
|
||||||
|
LOGGING = {
|
||||||
|
'version': 1,
|
||||||
|
'disable_existing_loggers': False,
|
||||||
|
'handlers': {
|
||||||
|
# Include the default Django email handler for errors
|
||||||
|
# This is what you'd get without configuring logging at all.
|
||||||
|
'mail_admins': {
|
||||||
|
'class': 'django.utils.log.AdminEmailHandler',
|
||||||
|
'level': 'ERROR',
|
||||||
|
# But the emails are plain text by default - HTML is nicer
|
||||||
|
'include_html': True,
|
||||||
|
},
|
||||||
|
# Log to a text file that can be rotated by logrotate
|
||||||
|
'logfile': {
|
||||||
|
'class': 'logging.handlers.WatchedFileHandler',
|
||||||
|
'filename': '{{env.home}}/var/log/django.log'
|
||||||
|
},
|
||||||
|
'console': {
|
||||||
|
'level': 'DEBUG',
|
||||||
|
'class': 'logging.StreamHandler',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'loggers': {
|
||||||
|
# Again, default Django configuration to email unhandled exceptions
|
||||||
|
'django.request': {
|
||||||
|
'handlers': ['mail_admins'],
|
||||||
|
'level': 'ERROR',
|
||||||
|
'propagate': True,
|
||||||
|
},
|
||||||
|
# Might as well log any errors anywhere else in Django
|
||||||
|
'django': {
|
||||||
|
# 'handlers': ['console', 'logfile'],
|
||||||
|
# 'level': 'DEBUG',
|
||||||
|
'handlers': ['logfile'],
|
||||||
|
'level': 'ERROR',
|
||||||
|
'propagate': False,
|
||||||
|
},
|
||||||
|
# Your own app - this assumes all your logger names start with "myapp."
|
||||||
|
'': {
|
||||||
|
'handlers': ['logfile'],
|
||||||
|
'level': 'WARNING', # Or maybe INFO or DEBUG
|
||||||
|
'propagate': False
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
if DEBUG:
|
||||||
|
# make all loggers use the console.
|
||||||
|
for logger in LOGGING['loggers']:
|
||||||
|
LOGGING['loggers'][logger]['handlers'] = ['console']
|
||||||
|
|
@@ -0,0 +1,123 @@
|
|||||||
|
|
||||||
|
ADMINS = (
|
||||||
|
('Support', '{{env.contact.email}}'),
|
||||||
|
)
|
||||||
|
|
||||||
|
NSP_MODE = "crud"
|
||||||
|
|
||||||
|
AUTH_USER_MODEL = 'peeringdb_server.User'
|
||||||
|
|
||||||
|
GRAPPELLI_ADMIN_TITLE = 'PeeringDB'
|
||||||
|
|
||||||
|
LOGIN_URL = "/login"
|
||||||
|
LOGIN_REDIRECT_URL = "/"
|
||||||
|
|
||||||
|
BASE_URL = "{{env.misc.base_url}}"
|
||||||
|
PASSWORD_RESET_URL = "{{env.misc.base_url}}/reset-password"
|
||||||
|
|
||||||
|
ACCOUNT_EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL = "/login"
|
||||||
|
ACCOUNT_EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL = "/verify"
|
||||||
|
ACCOUNT_EMAIL_REQUIRED = True
|
||||||
|
|
||||||
|
# all suggested entities will be created under this org
|
||||||
|
SUGGEST_ENTITY_ORG = {{env.misc.suggestions.org_id}}
|
||||||
|
|
||||||
|
CSRF_FAILURE_VIEW = 'peeringdb_server.views.view_http_error_csrf'
|
||||||
|
|
||||||
|
RECAPTCHA_VERIFY_URL = 'https://www.google.com/recaptcha/api/siteverify'
|
||||||
|
|
||||||
|
#'user' user group
|
||||||
|
USER_GROUP_ID=2
|
||||||
|
|
||||||
|
#'guest' user group
|
||||||
|
GUEST_GROUP_ID=1
|
||||||
|
|
||||||
|
MIDDLEWARE_CLASSES += (
|
||||||
|
'mobi.middleware.MobileDetectionMiddleware',
|
||||||
|
'oauth2_provider.middleware.OAuth2TokenMiddleware',
|
||||||
|
'corsheaders.middleware.CorsMiddleware',
|
||||||
|
)
|
||||||
|
|
||||||
|
MOBI_DETECT_TABLET = True
|
||||||
|
|
||||||
|
INSTALLED_APPS += [
|
||||||
|
'dal',
|
||||||
|
'dal_select2',
|
||||||
|
'grappelli',
|
||||||
|
'django.contrib.admin',
|
||||||
|
'allauth',
|
||||||
|
'allauth.account',
|
||||||
|
'allauth.socialaccount',
|
||||||
|
'allauth.socialaccount.providers.google',
|
||||||
|
'allauth.socialaccount.providers.facebook',
|
||||||
|
'bootstrap3',
|
||||||
|
'corsheaders',
|
||||||
|
'crispy_forms',
|
||||||
|
'django_countries',
|
||||||
|
'django_forms_bootstrap',
|
||||||
|
'django_inet',
|
||||||
|
'django_namespace_perms',
|
||||||
|
'django_peeringdb',
|
||||||
|
'django_tables2',
|
||||||
|
'oauth2_provider',
|
||||||
|
'peeringdb_server',
|
||||||
|
'reversion',
|
||||||
|
]
|
||||||
|
|
||||||
|
# django_peeringdb settings
|
||||||
|
PEERINGDB_ABSTRACT_ONLY = True
|
||||||
|
|
||||||
|
# add user defined iso code for Kosovo
|
||||||
|
COUNTRIES_OVERRIDE = {
|
||||||
|
'XK': _('Kosovo'),
|
||||||
|
}
|
||||||
|
|
||||||
|
AUTHENTICATION_BACKENDS += (
|
||||||
|
'oauth2_provider.backends.OAuth2Backend',
|
||||||
|
'allauth.account.auth_backends.AuthenticationBackend',
|
||||||
|
)
|
||||||
|
|
||||||
|
# No one specific host is allow Allow-Origin at this point]
|
||||||
|
# Origin for API get request is handled via signals (signals.py)
|
||||||
|
CORS_ORIGIN_WHITELIST = []
|
||||||
|
|
||||||
|
# don't allow cookies
|
||||||
|
CORS_ALLOW_CREDENTIALS = False
|
||||||
|
|
||||||
|
# only allow for cross origin requests for GET and OPTIONS
|
||||||
|
CORS_ALLOW_METHODS = ["GET", "OPTIONS"]
|
||||||
|
|
||||||
|
OAUTH2_PROVIDER = {
|
||||||
|
'SCOPES': {
|
||||||
|
'profile': 'user profile',
|
||||||
|
'email': 'email address',
|
||||||
|
'networks': 'list of user networks and permissions',
|
||||||
|
},
|
||||||
|
'ALLOWED_REDIRECT_URI_SCHEMES': ['https'],
|
||||||
|
'REQUEST_APPROVAL_PROMPT': 'auto',
|
||||||
|
}
|
||||||
|
|
||||||
|
# maximum value to allow in network.info_prefixes4
|
||||||
|
DATA_QUALITY_MAX_PREFIX_V4_LIMIT = {{ env.data_quality.max_prefix_v4_limit }}
|
||||||
|
|
||||||
|
# maximum value to allow in network.info_prefixes6
|
||||||
|
DATA_QUALITY_MAX_PREFIX_V6_LIMIT = {{ env.data_quality.max_prefix_v6_limit }}
|
||||||
|
|
||||||
|
RATELIMITS = {
|
||||||
|
{% for k,v in env.misc.ratelimits.items() %}
|
||||||
|
"{{ k }}" : "{{ v }}",
|
||||||
|
{% endfor %}
|
||||||
|
}
|
||||||
|
|
||||||
|
CACHES = {
|
||||||
|
"default" : {
|
||||||
|
"BACKEND" : "django.core.cache.backends.db.DatabaseCache",
|
||||||
|
"LOCATION" : "django_cache",
|
||||||
|
"OPTIONS" : {
|
||||||
|
# maximum number of entries in the cache
|
||||||
|
"MAX_ENTRIES" : 5000,
|
||||||
|
# once max entries are reach delete 500 of the oldest entries
|
||||||
|
"CULL_FREQUENCY" : 10
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,34 @@
|
|||||||
|
|
||||||
|
# If you set this to False, Django will make some optimizations so as not
|
||||||
|
# to load the internationalization machinery.
|
||||||
|
USE_I18N = True
|
||||||
|
#LANGUAGE_SESSION_KEY
|
||||||
|
#LANGUAGE_COOKIE_NAME
|
||||||
|
#LANGUAGE_COOKIE_AGE
|
||||||
|
#LANGUAGE_COOKIE_DOMAIN
|
||||||
|
#LANGUAGE_COOKIE_PATH
|
||||||
|
PROJECT_PATH = os.path.dirname(os.path.dirname(__file__))
|
||||||
|
LOCALE_PATHS = (
|
||||||
|
'',
|
||||||
|
os.path.join(PROJECT_PATH, 'locale/'),
|
||||||
|
)
|
||||||
|
LANGUAGES = [
|
||||||
|
{% if 'en' in env.locale %}('en', _('English')),{% endif %}
|
||||||
|
{% if 'pt' in env.locale %}('pt', _('Portuguese')),{% endif %}
|
||||||
|
{% if 'it' in env.locale %}('it', _('Italian')),{% endif %}
|
||||||
|
{% if 'cs_CZ' in env.locale %}('cs-cz', _('Czech')),{% endif %}
|
||||||
|
{% if 'da_DK' in env.locale %}('da-dk', _('Danish')),{% endif %}
|
||||||
|
{% if 'de_DE' in env.locale %}('de-de', _('German')),{% endif %}
|
||||||
|
{% if 'fr_FR' in env.locale %}('fr-fr', _('French')),{% endif %}
|
||||||
|
{% if 'id_ID' in env.locale %}('id-id', _('Indonesian')),{% endif %}
|
||||||
|
{% if 'ja_JP' in env.locale %}('ja-jp', _('Japanese')),{% endif %}
|
||||||
|
{% if 'ru_RU' in env.locale %}('ru-ru', _('Russian')),{% endif %}
|
||||||
|
{% if 'te_IN' in env.locale %}('te-en', _('Telugu')),{% endif %}
|
||||||
|
{% if 'zh_CN' in env.locale %}('zh-cn', _('Chinese')),{% endif %}
|
||||||
|
]
|
||||||
|
# Language code for this installation. All choices can be found here:
|
||||||
|
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||||
|
LANGUAGE_CODE = 'en-us'
|
||||||
|
# If you set this to False, Django will not format dates, numbers and
|
||||||
|
# calendars according to the current locale.
|
||||||
|
USE_L10N = True
|
@@ -0,0 +1,39 @@
|
|||||||
|
# uncomment to re-enable xbahn support
|
||||||
|
#INSTALLED_APPS += [
|
||||||
|
# 'django_xbahn',
|
||||||
|
# ]
|
||||||
|
|
||||||
|
# remove _ to re enabled xbahn connection
|
||||||
|
_XBAHN = {
|
||||||
|
"instances" : {
|
||||||
|
"main" : {
|
||||||
|
"host" : "localhost",
|
||||||
|
"port" : 5672,
|
||||||
|
"user" : "",
|
||||||
|
"pass" : "",
|
||||||
|
"log" : "debug",
|
||||||
|
"exchange" : "twentyc.xbahn"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"replication" : {
|
||||||
|
"debug" : False,
|
||||||
|
"interval" : 0.5,
|
||||||
|
"replicate" : ["namespace_perms"],
|
||||||
|
"main" : {
|
||||||
|
"namespace" : "peeringdb_server.replication"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"bridge" : {
|
||||||
|
"main" : {
|
||||||
|
"exposed" : [
|
||||||
|
("peeringdb_server.models", "Network"),
|
||||||
|
("peeringdb_server.models", "Facility"),
|
||||||
|
("peeringdb_server.models", "InternetExchange"),
|
||||||
|
("peeringdb_server.models", "User"),
|
||||||
|
("twentyc.xbahn.django.auth", "Authentication")
|
||||||
|
],
|
||||||
|
"namespace" : "peeringdb_server.bridge"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@@ -0,0 +1,60 @@
|
|||||||
|
INSTALLED_APPS += (
|
||||||
|
'rest_framework',
|
||||||
|
'rest_framework_swagger'
|
||||||
|
)
|
||||||
|
|
||||||
|
REST_FRAMEWORK = {
|
||||||
|
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||||
|
'rest_framework.authentication.BasicAuthentication',
|
||||||
|
'rest_framework.authentication.SessionAuthentication'
|
||||||
|
),
|
||||||
|
|
||||||
|
# Use hyperlinked styles by default.
|
||||||
|
# Only used if the `serializer_class` attribute is not set on a view.
|
||||||
|
'DEFAULT_MODEL_SERIALIZER_CLASS':
|
||||||
|
'rest_framework.serializers.HyperlinkedModelSerializer',
|
||||||
|
|
||||||
|
# Use Django's standard `django.contrib.auth` permissions,
|
||||||
|
# or allow read-only access for unauthenticated users.
|
||||||
|
# Handle rest of permissioning via django-namespace-perms
|
||||||
|
'DEFAULT_PERMISSION_CLASSES': [
|
||||||
|
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',
|
||||||
|
'django_namespace_perms.rest.BasePermission',
|
||||||
|
],
|
||||||
|
|
||||||
|
'DEFAULT_RENDERER_CLASSES': (
|
||||||
|
'peeringdb_server.renderers.MetaJSONRenderer',
|
||||||
|
),
|
||||||
|
|
||||||
|
{% if env.misc.api.throtteling.enabled %}
|
||||||
|
|
||||||
|
'DEFAULT_THROTTLE_CLASSES': (
|
||||||
|
'rest_framework.throttling.AnonRateThrottle',
|
||||||
|
'rest_framework.throttling.UserRateThrottle'
|
||||||
|
),
|
||||||
|
|
||||||
|
'DEFAULT_THROTTLE_RATES': {
|
||||||
|
'anon': '{{ env.misc.api.throtteling.anon }}',
|
||||||
|
'user': '{{ env.misc.api.throtteling.user }}'
|
||||||
|
},
|
||||||
|
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
SWAGGER_SETTINGS = {
|
||||||
|
'api_version' : '0.1',
|
||||||
|
'api_path' : '/api',
|
||||||
|
'enabled_methods' : [
|
||||||
|
'get',
|
||||||
|
'post',
|
||||||
|
'put',
|
||||||
|
'delete'
|
||||||
|
],
|
||||||
|
'info' : {
|
||||||
|
'contact' : '{{env.contact.email}}',
|
||||||
|
'description' : 'PeeringDB REST API'
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,13 @@
|
|||||||
|
|
||||||
|
PASSWORD_HASHERS = (
|
||||||
|
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
|
||||||
|
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
|
||||||
|
'django.contrib.auth.hashers.BCryptPasswordHasher',
|
||||||
|
'django.contrib.auth.hashers.SHA1PasswordHasher',
|
||||||
|
'django.contrib.auth.hashers.MD5PasswordHasher',
|
||||||
|
'django.contrib.auth.hashers.CryptPasswordHasher',
|
||||||
|
'hashers_passlib.md5_crypt',
|
||||||
|
'hashers_passlib.des_crypt',
|
||||||
|
'hashers_passlib.bsdi_crypt',
|
||||||
|
)
|
||||||
|
|
@@ -0,0 +1,154 @@
|
|||||||
|
# Django settings
|
||||||
|
|
||||||
|
import os
|
||||||
|
BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
|
||||||
|
|
||||||
|
# lazy init for translations
|
||||||
|
_ = lambda s: s
|
||||||
|
|
||||||
|
ADMINS = ()
|
||||||
|
MANAGERS = ADMINS
|
||||||
|
|
||||||
|
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
|
||||||
|
|
||||||
|
# Local time zone for this installation. Choices can be found here:
|
||||||
|
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
|
||||||
|
# although not all choices may be available on all operating systems.
|
||||||
|
# In a Windows environment this must be set to your system time zone.
|
||||||
|
TIME_ZONE = 'UTC'
|
||||||
|
|
||||||
|
SITE_ID = 1
|
||||||
|
|
||||||
|
|
||||||
|
# If you set this to False, Django will not use timezone-aware datetimes.
|
||||||
|
USE_TZ = True
|
||||||
|
|
||||||
|
# Absolute filesystem path to the directory that will hold user-uploaded files.
|
||||||
|
# Example: "/home/media/media.lawrence.com/media/"
|
||||||
|
MEDIA_ROOT = ''
|
||||||
|
|
||||||
|
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
|
||||||
|
# trailing slash.
|
||||||
|
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
|
||||||
|
MEDIA_URL = ''
|
||||||
|
|
||||||
|
# Additional locations of static files
|
||||||
|
STATICFILES_DIRS = (
|
||||||
|
# Put strings here, like "/home/html/static" or "C:/www/django/static".
|
||||||
|
# Always use forward slashes, even on Windows.
|
||||||
|
# Don't forget to use absolute paths, not relative paths.
|
||||||
|
)
|
||||||
|
|
||||||
|
# List of finder classes that know how to find static files in
|
||||||
|
# various locations.
|
||||||
|
STATICFILES_FINDERS = (
|
||||||
|
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
|
||||||
|
'django.contrib.staticfiles.finders.FileSystemFinder',
|
||||||
|
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
|
||||||
|
)
|
||||||
|
|
||||||
|
STATIC_URL = '/s/'
|
||||||
|
|
||||||
|
# List of callables that know how to import templates from various sources.
|
||||||
|
_TEMPLATE_LOADERS = (
|
||||||
|
'django.template.loaders.filesystem.Loader',
|
||||||
|
'django.template.loaders.app_directories.Loader',
|
||||||
|
# 'django.template.loaders.eggs.Loader',
|
||||||
|
)
|
||||||
|
|
||||||
|
_TEMPLATE_CONTEXT_PROCESSORS = (
|
||||||
|
"django.contrib.auth.context_processors.auth",
|
||||||
|
"django.template.context_processors.debug",
|
||||||
|
"django.template.context_processors.request",
|
||||||
|
"django.template.context_processors.i18n",
|
||||||
|
"django.template.context_processors.media",
|
||||||
|
"django.template.context_processors.static",
|
||||||
|
"django.template.context_processors.tz",
|
||||||
|
"django.contrib.messages.context_processors.messages",
|
||||||
|
)
|
||||||
|
|
||||||
|
_TEMPLATE_DIRS = (
|
||||||
|
os.path.join(BASE_DIR, "templates"),
|
||||||
|
os.path.join(BASE_DIR, "peeringdb_server", "templates"),
|
||||||
|
)
|
||||||
|
|
||||||
|
TEMPLATES = [
|
||||||
|
{
|
||||||
|
"BACKEND" : 'django.template.backends.django.DjangoTemplates',
|
||||||
|
"DIRS" : _TEMPLATE_DIRS,
|
||||||
|
"APP_DIRS" : True,
|
||||||
|
"OPTIONS" : {
|
||||||
|
"context_processors" : _TEMPLATE_CONTEXT_PROCESSORS,
|
||||||
|
#"loaders" : _TEMPLATE_LOADERS
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
# django <1.10 compat
|
||||||
|
#MIDDLEWARE_CLASSES = ()
|
||||||
|
|
||||||
|
MIDDLEWARE_CLASSES = (
|
||||||
|
'django.middleware.common.CommonMiddleware',
|
||||||
|
'django.contrib.sessions.middleware.SessionMiddleware',
|
||||||
|
'django.middleware.locale.LocaleMiddleware',
|
||||||
|
'django.middleware.csrf.CsrfViewMiddleware',
|
||||||
|
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||||
|
'django.contrib.messages.middleware.MessageMiddleware',
|
||||||
|
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||||
|
)
|
||||||
|
|
||||||
|
ROOT_URLCONF = 'peeringdb_com.urls'
|
||||||
|
|
||||||
|
# Python dotted path to the WSGI application used by Django's runserver.
|
||||||
|
WSGI_APPLICATION = 'peeringdb_com.wsgi.application'
|
||||||
|
|
||||||
|
CRISPY_TEMPLATE_PACK = 'bootstrap3'
|
||||||
|
|
||||||
|
INSTALLED_APPS = [
|
||||||
|
'django.contrib.auth',
|
||||||
|
'django.contrib.contenttypes',
|
||||||
|
'django.contrib.sessions',
|
||||||
|
'django.contrib.sites',
|
||||||
|
'django.contrib.messages',
|
||||||
|
'django.contrib.staticfiles',
|
||||||
|
]
|
||||||
|
|
||||||
|
# A sample logging configuration. The only tangible logging
|
||||||
|
# performed by this configuration is to send an email to
|
||||||
|
# the site admins on every HTTP 500 error when DEBUG=False.
|
||||||
|
# See http://docs.djangoproject.com/en/dev/topics/logging for
|
||||||
|
# more details on how to customize your logging configuration.
|
||||||
|
LOGGING = {
|
||||||
|
'version': 1,
|
||||||
|
'disable_existing_loggers': False,
|
||||||
|
'formatters' : {},
|
||||||
|
'filters': {
|
||||||
|
'require_debug_false': {
|
||||||
|
'()': 'django.utils.log.RequireDebugFalse'
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'handlers': {
|
||||||
|
'mail_admins': {
|
||||||
|
'level': 'ERROR',
|
||||||
|
'filters': ['require_debug_false'],
|
||||||
|
'class': 'django.utils.log.AdminEmailHandler'
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'loggers': {
|
||||||
|
'django.request': {
|
||||||
|
'handlers': ['mail_admins'],
|
||||||
|
'level': 'ERROR',
|
||||||
|
'propagate': True,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
AUTHENTICATION_BACKENDS = ("django_namespace_perms.auth.backends.NSPBackend",)
|
||||||
|
|
||||||
|
import os.path
|
||||||
|
import glob
|
||||||
|
conffiles = glob.glob(os.path.join(os.path.dirname(__file__), 'settings.d', '*.conf'))
|
||||||
|
conffiles.sort()
|
||||||
|
for f in conffiles:
|
||||||
|
execfile(os.path.abspath(f))
|
||||||
|
|
@@ -0,0 +1,36 @@
|
|||||||
|
from django.conf.urls import include, url
|
||||||
|
from django.conf.urls.static import static
|
||||||
|
from django.conf import settings
|
||||||
|
from django.views.generic.base import RedirectView
|
||||||
|
|
||||||
|
# auto admin
|
||||||
|
from django.contrib import admin
|
||||||
|
admin.autodiscover()
|
||||||
|
|
||||||
|
import peeringdb_server.urls
|
||||||
|
|
||||||
|
import allauth.account.views
|
||||||
|
|
||||||
|
from peeringdb_server.views import view_login
|
||||||
|
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
url(r'^grappelli/', include('grappelli.urls')),
|
||||||
|
#FIXME: adapt to DAL3 changes
|
||||||
|
#url(r'^autocomplete/', include('dal.urls')),
|
||||||
|
#FIXME: can remove this if we upgrade to allauth > 0.24.2, upgrade
|
||||||
|
#has been held off at this point because it requires migrations
|
||||||
|
url(r'^accounts/confirm-email/(?P<key>[-:\w]+)/$', allauth.account.views.confirm_email, name="account_confirm_email"),
|
||||||
|
url(r'^accounts/', include('allauth.urls')),
|
||||||
|
url(r'^cp/peeringdb_server/organizationmerge/add/', RedirectView.as_view(url='/cp/peeringdb_server/organization/org-merge-tool', permanent=False)),
|
||||||
|
# we want to use default pdb login for admin area, since that is rate limited.
|
||||||
|
url(r'^cp/login/', view_login),
|
||||||
|
url(r'^cp/', include(admin.site.urls)),
|
||||||
|
]
|
||||||
|
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
||||||
|
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
||||||
|
urlpatterns += peeringdb_server.urls.urlpatterns
|
||||||
|
|
||||||
|
handler_404 = 'peeringdb_server.views.view_http_error_404'
|
||||||
|
handler_403 = 'peeringdb_server.views.view_http_error_403'
|
||||||
|
|
@@ -0,0 +1,42 @@
|
|||||||
|
"""
|
||||||
|
WSGI config for peeringdb project.
|
||||||
|
|
||||||
|
This module contains the WSGI application used by Django's development server
|
||||||
|
and any production WSGI deployments. It should expose a module-level variable
|
||||||
|
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
|
||||||
|
this application via the ``WSGI_APPLICATION`` setting.
|
||||||
|
|
||||||
|
Usually you will have the standard Django WSGI application here, but it also
|
||||||
|
might make sense to replace the whole Django WSGI application with a custom one
|
||||||
|
that later delegates to the Django one. For example, you could introduce WSGI
|
||||||
|
middleware here, or combine a Django application with an application of another
|
||||||
|
framework.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "peeringdb_com.settings")
|
||||||
|
|
||||||
|
# This application object is used by any WSGI server configured to use this
|
||||||
|
# file. This includes Django's development server, if the WSGI_APPLICATION
|
||||||
|
# setting points here.
|
||||||
|
from django.core.wsgi import get_wsgi_application
|
||||||
|
application = get_wsgi_application()
|
||||||
|
|
||||||
|
# Apply WSGI middleware here.
|
||||||
|
# from helloworld.wsgi import HelloWorldApplication
|
||||||
|
# application = HelloWorldApplication(application)
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
if 'django_xbahn' in settings.INSTALLED_APPS:
|
||||||
|
import threading
|
||||||
|
import django_xbahn as xbahn
|
||||||
|
|
||||||
|
t = threading.Thread(target=xbahn.connect)
|
||||||
|
t.start();
|
||||||
|
|
||||||
|
if hasattr(settings, 'XBAHN') and settings.XBAHN.get("replication"):
|
||||||
|
import twentyc.xbahn.django.replication as replication
|
||||||
|
replication.replicator.daemon = True
|
||||||
|
replication.start()
|
0
config/facsimile/tmpl/_ALL_/_DEPLOY_/var/log/keep
Normal file
0
config/facsimile/tmpl/_ALL_/_DEPLOY_/var/log/keep
Normal file
142
docs/deploy.md
Normal file
142
docs/deploy.md
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
|
||||||
|
# PeeringDB Deploy
|
||||||
|
|
||||||
|
## Getting Started
|
||||||
|
|
||||||
|
This document uses the following variables
|
||||||
|
|
||||||
|
```sh
|
||||||
|
export FACS_REPO=git@github.com:20c/facsimile.git
|
||||||
|
export PDB_REPO=git@git.20c.com:pdb/peeringdb.git
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install obfuscation tools (Only needed if you want to obfuscate js)
|
||||||
|
|
||||||
|
```sh
|
||||||
|
git clone src.20c.com:20c/sys-deploy
|
||||||
|
mkdir -p ~/.local/google
|
||||||
|
wget https://dl.google.com/closure-compiler/compiler-latest.zip
|
||||||
|
unzip compiler-latest.zip
|
||||||
|
mv compiler.jar ~/.local/google
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install facsimile
|
||||||
|
|
||||||
|
```sh
|
||||||
|
git clone $FACS_REPO
|
||||||
|
cd facsimile
|
||||||
|
python setup.py install
|
||||||
|
```
|
||||||
|
### Clone peeringdb
|
||||||
|
|
||||||
|
```sh
|
||||||
|
git clone $PDB_REPO
|
||||||
|
```
|
||||||
|
|
||||||
|
# Developer instance deploymnet
|
||||||
|
|
||||||
|
Create ~/srv
|
||||||
|
|
||||||
|
```sh
|
||||||
|
mkdir ~/srv
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
facs $component $environment ($version|--src-dir=. for dev)
|
||||||
|
```
|
||||||
|
|
||||||
|
Use the `facs` command to deploy a virtualenv and server files for your dev instance
|
||||||
|
|
||||||
|
```sh
|
||||||
|
facs peeringdb dev --src-dir=.
|
||||||
|
facs venv dev --src-dir=.
|
||||||
|
```
|
||||||
|
|
||||||
|
Files will be deployed to `~/srv/dev.peeringdb.com`
|
||||||
|
|
||||||
|
## Setup passwords
|
||||||
|
|
||||||
|
Once you have run `facs` for the first time it will have created a .facsimile directory
|
||||||
|
|
||||||
|
You will want to open `.facimsile/state/{env}/state.yaml` and set the correct passwords for everything and then do
|
||||||
|
**another** deploy to make sure the correct passwords are deployed.
|
||||||
|
|
||||||
|
```
|
||||||
|
instances:
|
||||||
|
inmap: {}
|
||||||
|
uiidmap: {}
|
||||||
|
passwd:
|
||||||
|
deskpro: xxx # deskpro api key
|
||||||
|
djangokey: xxx # django secret
|
||||||
|
google_geoloc_api: xxx # google geolocation api key
|
||||||
|
peeringdb: xxx # database password
|
||||||
|
recaptcha: xxx # recaptcha secret
|
||||||
|
```
|
||||||
|
|
||||||
|
## Create api-cache dir
|
||||||
|
|
||||||
|
```
|
||||||
|
mkdir ~/srv/dev.peeringdb.com/etc/api-cache
|
||||||
|
```
|
||||||
|
|
||||||
|
## Symlink for convenience
|
||||||
|
|
||||||
|
In order to be able to run the manage.py command out of the pdb repository you can symlink the peeringdb_com directory from deploy location
|
||||||
|
|
||||||
|
In the peeringdb repo root:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
ln -s ~/srv/dev.peeringdb.com/peeringdb/peeringdb_com peeringdb_com
|
||||||
|
```
|
||||||
|
|
||||||
|
## Setup database
|
||||||
|
|
||||||
|
During deploy facsimile will have created a sql file at `.facsimile/tmp/RELEASE/dev/peeringdb/init.sql` - load it into mysql.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
mysql -u root -p < .facsimile/tmp/RELEASE/dev/peeringdb/init.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
## Migrate database - empty, from scratch
|
||||||
|
|
||||||
|
```sh
|
||||||
|
./manage.py migrate
|
||||||
|
./manage.py createcachetable
|
||||||
|
./manage.py loaddata fixtures/initial_data.json
|
||||||
|
```
|
||||||
|
|
||||||
|
## Running the dev instance
|
||||||
|
|
||||||
|
```sh
|
||||||
|
./manage.py runserver
|
||||||
|
```
|
||||||
|
|
||||||
|
## Hangups
|
||||||
|
|
||||||
|
### Authentication not working
|
||||||
|
|
||||||
|
This is usually caused by misconfigured session settings
|
||||||
|
|
||||||
|
In `peeringdb_com/settings.d/01-local.conf`
|
||||||
|
|
||||||
|
- Check that `SESSION_COOKIE_DOMAIN` is set to the apropriate domain
|
||||||
|
- Check that `SESSION_COOKIE_SECURE` is `False` if youre not serving over https
|
||||||
|
|
||||||
|
# Versioning
|
||||||
|
|
||||||
|
Everything is versioned for deploy, using facsimile.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# to update dev versions
|
||||||
|
version_bump_dev
|
||||||
|
|
||||||
|
# to update release versions
|
||||||
|
version_merge_release
|
||||||
|
```
|
||||||
|
|
||||||
|
# Tests
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pytest -v -rxs --cov-report term-missing --cov=peeringdb_server/ --capture=sys tests/
|
||||||
|
```
|
||||||
|
|
126
docs/development.md
Normal file
126
docs/development.md
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
|
||||||
|
# PeeringDB Server Development
|
||||||
|
|
||||||
|
## Models
|
||||||
|
|
||||||
|
Note: to add fk's to base models, you must add in both peeringdb.models. and in django_peeringdb.models concrete class
|
||||||
|
|
||||||
|
models.py
|
||||||
|
- make model
|
||||||
|
- add ref_tag_
|
||||||
|
|
||||||
|
serializers.py
|
||||||
|
- add serializer
|
||||||
|
|
||||||
|
peeringdb/rest.py
|
||||||
|
- make ViewSet
|
||||||
|
- register
|
||||||
|
|
||||||
|
perms
|
||||||
|
|
||||||
|
ALWAYS
|
||||||
|
|
||||||
|
- on the model create a classmethod called nsp_namespace_from_id
|
||||||
|
that should take all the ids it needs to make its namespace
|
||||||
|
and return that namespace
|
||||||
|
|
||||||
|
Look at the Network or NetworkContact class for examples
|
||||||
|
|
||||||
|
- on the model create a property method called nsp_namespace
|
||||||
|
that calls and returns __class__.nsp_namespace_from_id with
|
||||||
|
the aproporiate ids
|
||||||
|
|
||||||
|
- on the serializer create a method called nsp_namespace_create
|
||||||
|
that returns the namespace to be checked for creation perms
|
||||||
|
|
||||||
|
this method will be passed the validated serializer data so
|
||||||
|
you can use the ids / objects in there to help build your namespace
|
||||||
|
|
||||||
|
SOMETIMES
|
||||||
|
|
||||||
|
- on the model create a method called nsp_has_perms_PUT that
|
||||||
|
chould return weither or not the user has access to update
|
||||||
|
the instance. This is needed because in some cases in order
|
||||||
|
to update an existing object the user may need to be checked
|
||||||
|
on perms for more than one namespace - this lets you do that
|
||||||
|
|
||||||
|
Look at validate_PUT_ownership for helper function
|
||||||
|
|
||||||
|
- if the model is supposed to be rendered in a list somewhere
|
||||||
|
eg network contacts in poc_set under network make sure list
|
||||||
|
namespacing is setup correctly - again look at Network
|
||||||
|
and NetworkContact for examples.
|
||||||
|
|
||||||
|
|
||||||
|
## Modules
|
||||||
|
|
||||||
|
### RDAP
|
||||||
|
|
||||||
|
- Add output to parsing test
|
||||||
|
|
||||||
|
```sh
|
||||||
|
curl -L https://rdap.db.ripe.net/autnum/$ASN > tests/data/rdap/autnum/$ASN.input
|
||||||
|
```
|
||||||
|
|
||||||
|
or
|
||||||
|
```sh
|
||||||
|
scripts/rdap_getasn.sh
|
||||||
|
scripts/rdap_getent.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
- Pretty print RDAP data
|
||||||
|
|
||||||
|
```sh
|
||||||
|
munge json:https://rdap.arin.net/registry/autnum/2914 yaml:
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
### Whois Server
|
||||||
|
|
||||||
|
- To locally test whois, setup `/etc/xinetd.d/pdb-whois` with similar:
|
||||||
|
|
||||||
|
```
|
||||||
|
service whois
|
||||||
|
{
|
||||||
|
disable = no
|
||||||
|
socket_type = stream
|
||||||
|
wait = no
|
||||||
|
user = $USER
|
||||||
|
|
||||||
|
passenv =
|
||||||
|
|
||||||
|
server = /home/$USER/srv/dev.peeringdb.com/peeringdb/in.whoisd
|
||||||
|
log_on_failure = HOST
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
- Deploy and test against local
|
||||||
|
|
||||||
|
```sh
|
||||||
|
facs peeringdb dev --src-dir=. ; whois -h 127.0.0.1 as63311
|
||||||
|
pytest -v -rxs --cov-report term-missing --cov=peeringdb_server/ --capture=sys tests/
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### 404 on static files with runserver:
|
||||||
|
|
||||||
|
Make sure it's in debug mode
|
||||||
|
|
||||||
|
### api tests fail
|
||||||
|
|
||||||
|
You need to specify the test directory:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pytest tests/
|
||||||
|
```
|
||||||
|
|
||||||
|
### Can't see error because of warnings
|
||||||
|
|
||||||
|
Run pytest with `-p no:warnings`
|
||||||
|
|
||||||
|
### Run one specific test
|
||||||
|
|
||||||
|
Run pytest with `-k $test_name`
|
35
docs/translation.md
Normal file
35
docs/translation.md
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
|
||||||
|
# Integerating translations (Developers)
|
||||||
|
|
||||||
|
## Generate a new locale
|
||||||
|
|
||||||
|
Call makemessages and pass the locale to the `-l` option
|
||||||
|
|
||||||
|
Clone https://github.com/peeringdb/django-peeringdb somewhere
|
||||||
|
Symlink django_peeringdb in the same location as manage.py - this is so makemessages collects the locale from there as well.
|
||||||
|
|
||||||
|
```
|
||||||
|
django-admin makemessages -l de -s --no-wrap
|
||||||
|
django-admin makemessages -d djangojs -l de -s --no-wrap
|
||||||
|
```
|
||||||
|
|
||||||
|
## Updating messages in existing locale
|
||||||
|
|
||||||
|
This will add any new messages to all locale files. In other words if there has been new features added, you want to call this to make sure that their messages exist in gettext so they can be translated.
|
||||||
|
|
||||||
|
Clone https://github.com/peeringdb/django-peeringdb somewhere
|
||||||
|
Symlink django_peeringdb in the same location as manage.py - this is so makemessages collects the locale from there as well.
|
||||||
|
|
||||||
|
|
||||||
|
```
|
||||||
|
django-admin makemessages -a -s --no-wrap
|
||||||
|
django-admin makemessages -d djangojs -a -s --no-wrap
|
||||||
|
```
|
||||||
|
|
||||||
|
## Compile messages
|
||||||
|
|
||||||
|
Once translation files are ready, you need to compile them so django can use them.
|
||||||
|
|
||||||
|
```
|
||||||
|
django-admin compilemessages
|
||||||
|
```
|
43
fixtures/initial_data.json
Normal file
43
fixtures/initial_data.json
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"model" : "auth.group",
|
||||||
|
"pk" : 1,
|
||||||
|
"fields" : {
|
||||||
|
"name" : "guest"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model" : "auth.group",
|
||||||
|
"pk" : 2,
|
||||||
|
"fields" : {
|
||||||
|
"name" : "user"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model" : "django_namespace_perms.grouppermission",
|
||||||
|
"pk" : 1,
|
||||||
|
"fields" : {
|
||||||
|
"group" : 1,
|
||||||
|
"namespace" : "peeringdb.organization",
|
||||||
|
"permissions" : 1
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model" : "django_namespace_perms.grouppermission",
|
||||||
|
"pk" : 5,
|
||||||
|
"fields" : {
|
||||||
|
"group" : 2,
|
||||||
|
"namespace" : "peeringdb.organization",
|
||||||
|
"permissions" : 1
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model" : "django_namespace_perms.grouppermission",
|
||||||
|
"pk" : 6,
|
||||||
|
"fields" : {
|
||||||
|
"group" : 2,
|
||||||
|
"namespace" : "peeringdb.organization.*.network.*.poc_set.users",
|
||||||
|
"permissions" : 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
BIN
locale/cs_CZ/LC_MESSAGES/django.mo
Normal file
BIN
locale/cs_CZ/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/cs_CZ/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/cs_CZ/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
BIN
locale/da_DK/LC_MESSAGES/django.mo
Normal file
BIN
locale/da_DK/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/da_DK/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/da_DK/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
locale/el_GR/LC_MESSAGES/django.mo
Normal file
BIN
locale/el_GR/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/el_GR/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/el_GR/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
BIN
locale/en_US/LC_MESSAGES/django.mo
Normal file
BIN
locale/en_US/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/en_US/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/en_US/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
Binary file not shown.
BIN
locale/fr_FR/LC_MESSAGES/django.mo
Normal file
BIN
locale/fr_FR/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/fr_FR/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/fr_FR/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
BIN
locale/id_ID/LC_MESSAGES/django.mo
Normal file
BIN
locale/id_ID/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/id_ID/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/id_ID/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
BIN
locale/it/LC_MESSAGES/django.mo
Normal file
BIN
locale/it/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
@@ -591,7 +591,7 @@ msgstr "il comando è stato eseguito in questa data e ora"
|
|||||||
|
|
||||||
#: peeringdb_server/org_admin_views.py:140
|
#: peeringdb_server/org_admin_views.py:140
|
||||||
msgid "Organization and all Entities it owns"
|
msgid "Organization and all Entities it owns"
|
||||||
msgstr "Organizzazioni e tutte le Entità che possiede"
|
msgstr "Organizzazione e tutte le Entità che possiede"
|
||||||
|
|
||||||
#: peeringdb_server/org_admin_views.py:141
|
#: peeringdb_server/org_admin_views.py:141
|
||||||
msgid "Any Network"
|
msgid "Any Network"
|
||||||
@@ -715,7 +715,7 @@ msgstr "questa pagina"
|
|||||||
|
|
||||||
#: peeringdb_server/templates/account/login.html:7
|
#: peeringdb_server/templates/account/login.html:7
|
||||||
msgid "instead"
|
msgid "instead"
|
||||||
msgstr "in alternativa"
|
msgstr "invece"
|
||||||
|
|
||||||
#: peeringdb_server/templates/admin/peeringdb_server/commandlinetool/prepare_command.html:18
|
#: peeringdb_server/templates/admin/peeringdb_server/commandlinetool/prepare_command.html:18
|
||||||
#: peeringdb_server/templates/admin/peeringdb_server/commandlinetool/preview_command.html:16
|
#: peeringdb_server/templates/admin/peeringdb_server/commandlinetool/preview_command.html:16
|
||||||
@@ -1090,7 +1090,7 @@ msgstr ""
|
|||||||
|
|
||||||
#: peeringdb_server/templates/email/username-retrieve.txt:7
|
#: peeringdb_server/templates/email/username-retrieve.txt:7
|
||||||
msgid "If you did not request username retrieval, you can ignore this email."
|
msgid "If you did not request username retrieval, you can ignore this email."
|
||||||
msgstr "Se non hai richiesto il recupero del nome utente, ignora questa mail"
|
msgstr "Se non hai richiesto il recupero del nome utente, ignora questa email"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/advanced-search-fac.html:21
|
#: peeringdb_server/templates/site/advanced-search-fac.html:21
|
||||||
msgid "Address"
|
msgid "Address"
|
||||||
@@ -1494,11 +1494,11 @@ msgstr "Profilo"
|
|||||||
|
|
||||||
#: peeringdb_server/templates/site/header.html:81
|
#: peeringdb_server/templates/site/header.html:81
|
||||||
msgid "Logout"
|
msgid "Logout"
|
||||||
msgstr "Scollegati"
|
msgstr "Scollègati"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/header.html:85
|
#: peeringdb_server/templates/site/header.html:85
|
||||||
msgid "Register"
|
msgid "Register"
|
||||||
msgstr "Registrati"
|
msgstr "Regìstrati"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/header.html:85
|
#: peeringdb_server/templates/site/header.html:85
|
||||||
#: peeringdb_server/templates/site/view_exchange_bottom.html:40
|
#: peeringdb_server/templates/site/view_exchange_bottom.html:40
|
||||||
@@ -1516,7 +1516,7 @@ msgstr "o"
|
|||||||
#: peeringdb_server/templates/site/header.html:85
|
#: peeringdb_server/templates/site/header.html:85
|
||||||
#: peeringdb_server/templates/site/login.html:21
|
#: peeringdb_server/templates/site/login.html:21
|
||||||
msgid "Login"
|
msgid "Login"
|
||||||
msgstr "Collegati"
|
msgstr "Collègati"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/index.html:34
|
#: peeringdb_server/templates/site/index.html:34
|
||||||
msgid "PeeringDB facilitates the exchange of information related to Peering."
|
msgid "PeeringDB facilitates the exchange of information related to Peering."
|
||||||
@@ -1548,7 +1548,7 @@ msgstr "puoi registrarti qui"
|
|||||||
|
|
||||||
#: peeringdb_server/templates/site/index.html:44
|
#: peeringdb_server/templates/site/index.html:44
|
||||||
msgid "Please register ONLY if you are a peering network."
|
msgid "Please register ONLY if you are a peering network."
|
||||||
msgstr "Si prega di registrarsi SOLO se sei una rete di peering"
|
msgstr "Si prega di registrarsi SOLO se si è una rete di peering"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/index.html:44
|
#: peeringdb_server/templates/site/index.html:44
|
||||||
msgid "You may also"
|
msgid "You may also"
|
||||||
@@ -1605,15 +1605,15 @@ msgstr "Ho dimenticato il mio nome utente .."
|
|||||||
|
|
||||||
#: peeringdb_server/templates/site/oauth-login.html:6
|
#: peeringdb_server/templates/site/oauth-login.html:6
|
||||||
msgid "Login with oAuth"
|
msgid "Login with oAuth"
|
||||||
msgstr "Collegati con oAuth"
|
msgstr "Collègati con oAuth"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/oauth-login.html:8
|
#: peeringdb_server/templates/site/oauth-login.html:8
|
||||||
msgid "Login with your google account"
|
msgid "Login with your google account"
|
||||||
msgstr "Collegati con il tuo account google"
|
msgstr "Collègati con il tuo account google"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/oauth-login.html:9
|
#: peeringdb_server/templates/site/oauth-login.html:9
|
||||||
msgid "Login with your facebook account"
|
msgid "Login with your facebook account"
|
||||||
msgstr "Collegati con il tuo account facebook"
|
msgstr "Collègati con il tuo account facebook"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/password-reset.html:15
|
#: peeringdb_server/templates/site/password-reset.html:15
|
||||||
msgid ""
|
msgid ""
|
||||||
@@ -1878,11 +1878,11 @@ msgid ""
|
|||||||
"with have reviewed your account. Thank you for your patience."
|
"with have reviewed your account. Thank you for your patience."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
"Non avrai pieno accesso finché noi o l'organizzazione a cui sei affiliato "
|
"Non avrai pieno accesso finché noi o l'organizzazione a cui sei affiliato "
|
||||||
"non avranno revisionato il tuo account. Grazie per la vostra pazienza."
|
"non avremo revisionato il tuo account. Grazie per la vostra pazienza."
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/verify.html:75
|
#: peeringdb_server/templates/site/verify.html:75
|
||||||
msgid "Affiliate with organization"
|
msgid "Affiliate with organization"
|
||||||
msgstr "Affiliati con l'organizzazione"
|
msgstr "Affìliati con l'organizzazione"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/verify.html:80
|
#: peeringdb_server/templates/site/verify.html:80
|
||||||
#, python-format
|
#, python-format
|
||||||
@@ -1925,7 +1925,7 @@ msgstr ""
|
|||||||
|
|
||||||
#: peeringdb_server/templates/site/verify.html:118
|
#: peeringdb_server/templates/site/verify.html:118
|
||||||
msgid "Affiliate"
|
msgid "Affiliate"
|
||||||
msgstr "Affiliati"
|
msgstr "Affìliati"
|
||||||
|
|
||||||
#: peeringdb_server/templates/site/verify.html:121
|
#: peeringdb_server/templates/site/verify.html:121
|
||||||
#: peeringdb_server/templates/site/view_organization_tools.html:329
|
#: peeringdb_server/templates/site/view_organization_tools.html:329
|
||||||
@@ -2617,7 +2617,7 @@ msgstr "Contatti"
|
|||||||
|
|
||||||
#: peeringdb_server/views.py:1099 peeringdb_server/views.py:1242
|
#: peeringdb_server/views.py:1099 peeringdb_server/views.py:1242
|
||||||
msgid "Company Website"
|
msgid "Company Website"
|
||||||
msgstr "Sito web della compagnia"
|
msgstr "Sito web dell'azienda"
|
||||||
|
|
||||||
#: peeringdb_server/views.py:1109
|
#: peeringdb_server/views.py:1109
|
||||||
msgid "Technical Email"
|
msgid "Technical Email"
|
||||||
@@ -2693,7 +2693,7 @@ msgstr "Account disabilitato."
|
|||||||
|
|
||||||
#: peeringdb_server/views.py:1563
|
#: peeringdb_server/views.py:1563
|
||||||
msgid "Invalid username/password."
|
msgid "Invalid username/password."
|
||||||
msgstr "Nome utente o password non validi"
|
msgstr "Nome utente o password non valido"
|
||||||
|
|
||||||
#: tests/django_init.py:62
|
#: tests/django_init.py:62
|
||||||
msgid "English"
|
msgid "English"
|
||||||
|
BIN
locale/it/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/it/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
BIN
locale/ja_JP/LC_MESSAGES/django.mo
Normal file
BIN
locale/ja_JP/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/ja_JP/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/ja_JP/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
Binary file not shown.
BIN
locale/ru_RU/LC_MESSAGES/django.mo
Normal file
BIN
locale/ru_RU/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/ru_RU/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/ru_RU/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
BIN
locale/te_IN/LC_MESSAGES/django.mo
Normal file
BIN
locale/te_IN/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/te_IN/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/te_IN/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
BIN
locale/zh_CN/LC_MESSAGES/django.mo
Normal file
BIN
locale/zh_CN/LC_MESSAGES/django.mo
Normal file
Binary file not shown.
BIN
locale/zh_CN/LC_MESSAGES/djangojs.mo
Normal file
BIN
locale/zh_CN/LC_MESSAGES/djangojs.mo
Normal file
Binary file not shown.
10
manage.py
Executable file
10
manage.py
Executable file
@@ -0,0 +1,10 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "peeringdb_com.settings")
|
||||||
|
|
||||||
|
from django.core.management import execute_from_command_line
|
||||||
|
|
||||||
|
execute_from_command_line(sys.argv)
|
1
peeringdb_server/__init__.py
Normal file
1
peeringdb_server/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
default_app_config = 'peeringdb_server.apps.PeeringDBServerAppConfig'
|
1371
peeringdb_server/admin.py
Normal file
1371
peeringdb_server/admin.py
Normal file
File diff suppressed because it is too large
Load Diff
224
peeringdb_server/admin_commandline_tools.py
Normal file
224
peeringdb_server/admin_commandline_tools.py
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
import StringIO
|
||||||
|
import json
|
||||||
|
import reversion
|
||||||
|
from dal import autocomplete
|
||||||
|
from django import forms
|
||||||
|
from django.core.management import call_command
|
||||||
|
from peeringdb_server.models import (COMMANDLINE_TOOLS, CommandLineTool,
|
||||||
|
InternetExchange, Facility)
|
||||||
|
|
||||||
|
|
||||||
|
def _(m):
|
||||||
|
return m
|
||||||
|
|
||||||
|
|
||||||
|
TOOL_MAP = {}
|
||||||
|
|
||||||
|
|
||||||
|
def register_tool(cls):
|
||||||
|
TOOL_MAP[cls.tool] = cls
|
||||||
|
|
||||||
|
|
||||||
|
def get_tool(tool_id, form):
|
||||||
|
"""
|
||||||
|
Arguments:
|
||||||
|
tool_id (str): tool_id as it exists in COMMANDLINE_TOOLS
|
||||||
|
form (django.forms.Form): form instance
|
||||||
|
Returns:
|
||||||
|
CommandLineToolWrapper instance
|
||||||
|
"""
|
||||||
|
t = TOOL_MAP.get(tool_id)
|
||||||
|
t = t(form)
|
||||||
|
return t
|
||||||
|
|
||||||
|
|
||||||
|
def get_tool_from_data(data):
|
||||||
|
"""
|
||||||
|
Arguments:
|
||||||
|
data (dict): dict containing form data, at the very least
|
||||||
|
needs to have a "tool" key containing the tool_id
|
||||||
|
Returns:
|
||||||
|
CommandLineToolWrapper instance
|
||||||
|
"""
|
||||||
|
tool_id = data.get("tool")
|
||||||
|
t = TOOL_MAP.get(tool_id)
|
||||||
|
form = t.Form(data)
|
||||||
|
form.is_valid()
|
||||||
|
t = t(form)
|
||||||
|
return t
|
||||||
|
|
||||||
|
|
||||||
|
class EmptyId(object):
|
||||||
|
id = 0
|
||||||
|
|
||||||
|
|
||||||
|
class CommandLineToolWrapper(object):
|
||||||
|
|
||||||
|
tool = None
|
||||||
|
|
||||||
|
class Form(forms.Form):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __init__(self, form):
|
||||||
|
self.status = 0
|
||||||
|
self.result = None
|
||||||
|
self.args = []
|
||||||
|
self.kwargs = {}
|
||||||
|
self.form_instance = form
|
||||||
|
self.set_arguments(form.cleaned_data)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
return dict(COMMANDLINE_TOOLS).get(self.tool)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def form(self):
|
||||||
|
return self.Form()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def description(self):
|
||||||
|
return self.tool
|
||||||
|
|
||||||
|
@property
|
||||||
|
def pretty_result(self):
|
||||||
|
if not self.result:
|
||||||
|
return ""
|
||||||
|
r = []
|
||||||
|
for line in self.result.split("\n"):
|
||||||
|
if line.find("[error]") > -1:
|
||||||
|
r.append('<div class="error">{}</div>'.format(line))
|
||||||
|
elif line.find("[warning]") > -1:
|
||||||
|
r.append('<div class="warning">{}</div>'.format(line))
|
||||||
|
else:
|
||||||
|
r.append('<div class="info">{}</div>'.format(line))
|
||||||
|
return "\n".join(r)
|
||||||
|
|
||||||
|
def set_arguments(self, form_data):
|
||||||
|
pass
|
||||||
|
|
||||||
|
@reversion.create_revision()
|
||||||
|
def run(self, user, commit=False):
|
||||||
|
r = StringIO.StringIO()
|
||||||
|
try:
|
||||||
|
if commit:
|
||||||
|
call_command(self.tool, *self.args, commit=True, stdout=r,
|
||||||
|
**self.kwargs)
|
||||||
|
else:
|
||||||
|
call_command(self.tool, *self.args, stdout=r, **self.kwargs)
|
||||||
|
self.result = r.getvalue()
|
||||||
|
except Exception as inst:
|
||||||
|
self.result = "[error] {}".format(inst)
|
||||||
|
self.status = 1
|
||||||
|
|
||||||
|
if commit:
|
||||||
|
CommandLineTool.objects.create(user=user, tool=self.tool,
|
||||||
|
description=self.description,
|
||||||
|
arguments=json.dumps({
|
||||||
|
"args": self.args,
|
||||||
|
"kwargs": self.kwargs
|
||||||
|
}), result=self.result)
|
||||||
|
return self.result
|
||||||
|
|
||||||
|
|
||||||
|
# TOOL: RENUMBER LAN
|
||||||
|
|
||||||
|
|
||||||
|
@register_tool
|
||||||
|
class ToolRenumberLans(CommandLineToolWrapper):
|
||||||
|
"""
|
||||||
|
This tools runs the pdb_renumber_lans command to
|
||||||
|
Renumber IP Spaces in an Exchange
|
||||||
|
"""
|
||||||
|
|
||||||
|
tool = "pdb_renumber_lans"
|
||||||
|
|
||||||
|
class Form(forms.Form):
|
||||||
|
exchange = forms.ModelChoiceField(
|
||||||
|
queryset=InternetExchange.handleref.undeleted().order_by("name"),
|
||||||
|
widget=autocomplete.ModelSelect2(url="/autocomplete/ix/json"))
|
||||||
|
old_prefix = forms.CharField(
|
||||||
|
help_text=_(
|
||||||
|
"Three leftmost octets of the original prefix - eg. xxx.xxx.xxx"
|
||||||
|
))
|
||||||
|
new_prefix = forms.CharField(
|
||||||
|
help_text=_(
|
||||||
|
"Three leftmost octets of the new prefix - eg. xxx.xxx.xxx"))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def description(self):
|
||||||
|
""" Provide a human readable description of the command that was run """
|
||||||
|
return "{}: {} to {}".format(
|
||||||
|
InternetExchange.objects.get(id=self.kwargs["ix"]), self.args[0],
|
||||||
|
self.args[1])
|
||||||
|
|
||||||
|
def set_arguments(self, form_data):
|
||||||
|
self.args = [form_data.get("old_prefix"), form_data.get("new_prefix")]
|
||||||
|
self.kwargs = {"ix": form_data.get("exchange", EmptyId()).id}
|
||||||
|
|
||||||
|
|
||||||
|
@register_tool
|
||||||
|
class ToolMergeFacilities(CommandLineToolWrapper):
|
||||||
|
"""
|
||||||
|
This tool runs the pdb_fac_merge command to
|
||||||
|
merge two facilities
|
||||||
|
"""
|
||||||
|
|
||||||
|
tool = "pdb_fac_merge"
|
||||||
|
|
||||||
|
class Form(forms.Form):
|
||||||
|
other = forms.ModelChoiceField(
|
||||||
|
queryset=Facility.handleref.undeleted().order_by("name"),
|
||||||
|
widget=autocomplete.ModelSelect2(url="/autocomplete/fac/json"),
|
||||||
|
help_text=_("Merge this facility - it will be deleted"))
|
||||||
|
|
||||||
|
target = forms.ModelChoiceField(
|
||||||
|
queryset=Facility.handleref.undeleted().order_by("name"),
|
||||||
|
widget=autocomplete.ModelSelect2(url="/autocomplete/fac/json"),
|
||||||
|
help_text=_("Target facility"))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def description(self):
|
||||||
|
""" Provide a human readable description of the command that was run """
|
||||||
|
return "{} into {}".format(
|
||||||
|
Facility.objects.get(id=self.kwargs["ids"]),
|
||||||
|
Facility.objects.get(id=self.kwargs["target"]))
|
||||||
|
|
||||||
|
def set_arguments(self, form_data):
|
||||||
|
self.kwargs = {
|
||||||
|
"ids": str(form_data.get("other", EmptyId()).id),
|
||||||
|
"target": str(form_data.get("target", EmptyId()).id)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@register_tool
|
||||||
|
class ToolMergeFacilitiesUndo(CommandLineToolWrapper):
|
||||||
|
"""
|
||||||
|
This tool runs the pdb_fac_merge_undo command to
|
||||||
|
undo a facility merge
|
||||||
|
"""
|
||||||
|
|
||||||
|
tool = "pdb_fac_merge_undo"
|
||||||
|
|
||||||
|
class Form(forms.Form):
|
||||||
|
merge = forms.ModelChoiceField(
|
||||||
|
queryset=CommandLineTool.objects.filter(
|
||||||
|
tool="pdb_fac_merge").order_by("-created"),
|
||||||
|
widget=autocomplete.ModelSelect2(
|
||||||
|
url="/autocomplete/admin/clt-history/pdb_fac_merge/"),
|
||||||
|
help_text=_("Undo this merge"))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def description(self):
|
||||||
|
""" Provide a human readable description of the command that was run """
|
||||||
|
|
||||||
|
# in order to make a useful description we need to collect the arguments
|
||||||
|
# from the merge command that was undone
|
||||||
|
kwargs = json.loads(
|
||||||
|
CommandLineTool.objects.get(
|
||||||
|
id=self.kwargs["clt"]).arguments).get("kwargs")
|
||||||
|
return "Undo: {} into {}".format(
|
||||||
|
Facility.objects.get(id=kwargs["ids"]),
|
||||||
|
Facility.objects.get(id=kwargs["target"]))
|
||||||
|
|
||||||
|
def set_arguments(self, form_data):
|
||||||
|
self.kwargs = {"clt": form_data.get("merge", EmptyId()).id}
|
284
peeringdb_server/api_cache.py
Normal file
284
peeringdb_server/api_cache.py
Normal file
@@ -0,0 +1,284 @@
|
|||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
from peeringdb_server.models import (InternetExchange, IXLan, Network)
|
||||||
|
|
||||||
|
import django_namespace_perms.util as nsp
|
||||||
|
|
||||||
|
|
||||||
|
class CacheRedirect(Exception):
|
||||||
|
"""
|
||||||
|
Raise this error to redirect to cache response during viewset.get_queryset
|
||||||
|
or viewset.list()
|
||||||
|
|
||||||
|
Argument should be an APICacheLoader instance
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, loader):
|
||||||
|
super(Exception, self).__init__(self, "Result to be loaded from cache")
|
||||||
|
self.loader = loader
|
||||||
|
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
# API CACHE LOADER
|
||||||
|
|
||||||
|
|
||||||
|
class APICacheLoader(object):
|
||||||
|
"""
|
||||||
|
Checks if an API GET request qualifies for a cache load
|
||||||
|
and if it does allows you to provide the cached result
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, viewset, qset, filters):
|
||||||
|
request = viewset.request
|
||||||
|
self.request = request
|
||||||
|
self.qset = qset
|
||||||
|
self.filters = filters
|
||||||
|
self.model = viewset.model
|
||||||
|
self.viewset = viewset
|
||||||
|
self.depth = min(int(request.query_params.get("depth", 0)), 3)
|
||||||
|
self.limit = int(request.query_params.get("limit", 0))
|
||||||
|
self.skip = int(request.query_params.get("skip", 0))
|
||||||
|
self.since = int(request.query_params.get("since", 0))
|
||||||
|
self.fields = request.query_params.get("fields")
|
||||||
|
if self.fields:
|
||||||
|
self.fields = self.fields.split(",")
|
||||||
|
self.path = os.path.join(settings.API_CACHE_ROOT, "%s-%s.json" %
|
||||||
|
(viewset.model.handleref.tag, self.depth))
|
||||||
|
|
||||||
|
def qualifies(self):
|
||||||
|
"""
|
||||||
|
Check if request qualifies for a cache load
|
||||||
|
"""
|
||||||
|
|
||||||
|
# api cache use is disabled, no
|
||||||
|
if not getattr(settings, "API_CACHE_ENABLED", False):
|
||||||
|
return False
|
||||||
|
# no depth and a limit lower than 251 seems like a tipping point
|
||||||
|
# were non-cache retrieval is faster still
|
||||||
|
if not self.depth and self.limit and self.limit <= 250 and getattr(
|
||||||
|
settings, "API_CACHE_ALL_LIMITS", False) is False:
|
||||||
|
return False
|
||||||
|
# filters have been specified, no
|
||||||
|
if self.filters or self.since:
|
||||||
|
return False
|
||||||
|
# cache file non-existant, no
|
||||||
|
if not os.path.exists(self.path):
|
||||||
|
return False
|
||||||
|
# request method is anything but GET, no
|
||||||
|
if self.request.method != "GET":
|
||||||
|
return False
|
||||||
|
# primary key set in request, no
|
||||||
|
if self.viewset.kwargs:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def load(self):
|
||||||
|
"""
|
||||||
|
Load the cached response according to tag and depth
|
||||||
|
"""
|
||||||
|
|
||||||
|
# read cache file
|
||||||
|
with open(self.path, "r") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
|
||||||
|
data = data.get("data")
|
||||||
|
|
||||||
|
# apply permissions to data
|
||||||
|
fnc = getattr(self, "apply_permissions_%s" % self.model.handleref.tag,
|
||||||
|
None)
|
||||||
|
if fnc:
|
||||||
|
data = fnc(data)
|
||||||
|
|
||||||
|
# apply pagination
|
||||||
|
if self.skip and self.limit:
|
||||||
|
data = data[self.skip:self.skip + self.limit]
|
||||||
|
elif self.skip:
|
||||||
|
data = data[self.skip:]
|
||||||
|
elif self.limit:
|
||||||
|
data = data[:self.limit]
|
||||||
|
|
||||||
|
return {
|
||||||
|
"results": data,
|
||||||
|
"__meta": {
|
||||||
|
"generated": os.path.getmtime(self.path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def apply_permissions(self, ns, data, ruleset={}):
|
||||||
|
"""
|
||||||
|
Wrapper function to apply permissions to a data row and
|
||||||
|
return the sanitized result
|
||||||
|
"""
|
||||||
|
if type(ns) != list:
|
||||||
|
ns = ns.split(".")
|
||||||
|
|
||||||
|
# prepare ruleset
|
||||||
|
if ruleset:
|
||||||
|
_ruleset = {}
|
||||||
|
namespace_str = ".".join(ns)
|
||||||
|
for section, rules in ruleset.items():
|
||||||
|
_ruleset[section] = {}
|
||||||
|
for rule, perms in rules.items():
|
||||||
|
_ruleset[section]["%s.%s" % (namespace_str, rule)] = perms
|
||||||
|
ruleset = _ruleset
|
||||||
|
|
||||||
|
return nsp.dict_get_path(
|
||||||
|
nsp.permissions_apply(
|
||||||
|
nsp.dict_from_namespace(ns, data), self.request.user,
|
||||||
|
ruleset=ruleset), ns)
|
||||||
|
|
||||||
|
def apply_permissions_generic(self, data, explicit=False, join_ids=[],
|
||||||
|
**kwargs):
|
||||||
|
"""
|
||||||
|
Apply permissions to all rows according to rules
|
||||||
|
specified in parameters
|
||||||
|
|
||||||
|
explicit <function>
|
||||||
|
|
||||||
|
if explicit is passed as a function it will be called and the result will
|
||||||
|
determine whether or not explicit read perms are required for the row
|
||||||
|
|
||||||
|
join_ids [(target_id<str>, proxy_id<str>, model<handleref>), ..]
|
||||||
|
|
||||||
|
Since we are checking permissioning namespaces, and those namespaces may
|
||||||
|
consist of object ids that are not necessarily in the dataset you can
|
||||||
|
join those ids in via the join_ids parameter
|
||||||
|
"""
|
||||||
|
rv = []
|
||||||
|
|
||||||
|
joined_ids = {}
|
||||||
|
e = {}
|
||||||
|
inst = self.model()
|
||||||
|
|
||||||
|
# perform id joining
|
||||||
|
if join_ids:
|
||||||
|
for t, p, model in join_ids:
|
||||||
|
joined_ids[t] = {
|
||||||
|
"p": p,
|
||||||
|
"ids": self.join_ids(data, t, p, model,
|
||||||
|
joined_ids.get(p, e).get("ids",
|
||||||
|
e).values())
|
||||||
|
}
|
||||||
|
|
||||||
|
for row in data:
|
||||||
|
|
||||||
|
# create dict containing ids needed to build the permissioning
|
||||||
|
# namespace
|
||||||
|
init = dict([(k, row.get(v)) for k, v in kwargs.items()])
|
||||||
|
|
||||||
|
# joined ids
|
||||||
|
for t, j in joined_ids.items():
|
||||||
|
if j["p"] in row:
|
||||||
|
init[t] = j["ids"].get(row.get(j["p"]))
|
||||||
|
elif t in joined_ids:
|
||||||
|
init[t] = joined_ids.get(t).get("ids").get(init[j["p"]])
|
||||||
|
|
||||||
|
# build permissioning namespace
|
||||||
|
ns = self.model.nsp_namespace_from_id(**init).lower()
|
||||||
|
|
||||||
|
# apply fields filter
|
||||||
|
if self.fields:
|
||||||
|
for k in row.keys():
|
||||||
|
if k not in self.fields:
|
||||||
|
del row[k]
|
||||||
|
|
||||||
|
# determine whether or not read perms for this object need
|
||||||
|
# to be explicitly set
|
||||||
|
if explicit and callable(explicit):
|
||||||
|
expl = explicit(row)
|
||||||
|
else:
|
||||||
|
expl = False
|
||||||
|
|
||||||
|
# initial read perms check
|
||||||
|
if nsp.has_perms(self.request.user, ns, 0x01, explicit=expl):
|
||||||
|
ruleset = getattr(inst, "nsp_ruleset", {})
|
||||||
|
|
||||||
|
# apply permissions to tree
|
||||||
|
row = self.apply_permissions(ns, row, ruleset=ruleset)
|
||||||
|
|
||||||
|
# if row still has data aftewards, append to results
|
||||||
|
if row:
|
||||||
|
rv.append(row)
|
||||||
|
|
||||||
|
return rv
|
||||||
|
|
||||||
|
def join_ids(self, data, target_id, proxy_id, model, stash=[]):
|
||||||
|
"""
|
||||||
|
Returns a dict mapping of (proxy_id, target_id)
|
||||||
|
|
||||||
|
target ids are obtained by fetching instances of specified
|
||||||
|
model that match the supplied proxy ids
|
||||||
|
|
||||||
|
proxy ids will be gotten from data or stash
|
||||||
|
|
||||||
|
data [<dict>, ..] list of data rows from cache load, the field
|
||||||
|
name provided in "proxy_id" will be used to obtain the id from
|
||||||
|
each row
|
||||||
|
|
||||||
|
stash [<int>,..] list of ids
|
||||||
|
|
||||||
|
if stash is set, data and proxy_field will be ignored
|
||||||
|
"""
|
||||||
|
|
||||||
|
if stash:
|
||||||
|
ids = stash
|
||||||
|
else:
|
||||||
|
ids = [r[proxy_id] for r in data]
|
||||||
|
|
||||||
|
return dict([
|
||||||
|
(r["id"], r[target_id])
|
||||||
|
for r in model.objects.filter(id__in=ids).values("id", target_id)
|
||||||
|
])
|
||||||
|
|
||||||
|
# permissioning functions for each handlref type
|
||||||
|
|
||||||
|
def apply_permissions_org(self, data):
|
||||||
|
return self.apply_permissions_generic(data, id="id")
|
||||||
|
|
||||||
|
def apply_permissions_fac(self, data):
|
||||||
|
return self.apply_permissions_generic(data, fac_id="id",
|
||||||
|
org_id="org_id")
|
||||||
|
|
||||||
|
def apply_permissions_ix(self, data):
|
||||||
|
return self.apply_permissions_generic(data, ix_id="id",
|
||||||
|
org_id="org_id")
|
||||||
|
|
||||||
|
def apply_permissions_net(self, data):
|
||||||
|
return self.apply_permissions_generic(data, net_id="id",
|
||||||
|
org_id="org_id")
|
||||||
|
|
||||||
|
def apply_permissions_ixpfx(self, data):
|
||||||
|
return self.apply_permissions_generic(
|
||||||
|
data, join_ids=[("ix_id", "ixlan_id", IXLan),
|
||||||
|
("org_id", "ix_id",
|
||||||
|
InternetExchange)], ixlan_id="ixlan_id", id="id")
|
||||||
|
|
||||||
|
def apply_permissions_ixlan(self, data):
|
||||||
|
return self.apply_permissions_generic(
|
||||||
|
data, join_ids=[("org_id", "ix_id",
|
||||||
|
InternetExchange)], ix_id="ix_id", id="id")
|
||||||
|
|
||||||
|
def apply_permissions_ixfac(self, data):
|
||||||
|
return self.apply_permissions_generic(
|
||||||
|
data, join_ids=[("org_id", "ix_id",
|
||||||
|
InternetExchange)], ix_id="ix_id", id="id")
|
||||||
|
|
||||||
|
def apply_permissions_netfac(self, data):
|
||||||
|
return self.apply_permissions_generic(
|
||||||
|
data, join_ids=[("org_id", "net_id",
|
||||||
|
Network)], net_id="net_id", fac_id="fac_id")
|
||||||
|
|
||||||
|
def apply_permissions_netixlan(self, data):
|
||||||
|
return self.apply_permissions_generic(
|
||||||
|
data, join_ids=[("org_id", "net_id",
|
||||||
|
Network)], net_id="net_id", ixlan_id="ixlan_id")
|
||||||
|
|
||||||
|
def apply_permissions_poc(self, data):
|
||||||
|
return self.apply_permissions_generic(
|
||||||
|
data, explicit=lambda x: (x.get("visible") != "Public"),
|
||||||
|
join_ids=[("org_id", "net_id",
|
||||||
|
Network)], vis="visible", net_id="net_id")
|
9
peeringdb_server/apps.py
Normal file
9
peeringdb_server/apps.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class PeeringDBServerAppConfig(AppConfig):
|
||||||
|
name = "peeringdb_server"
|
||||||
|
verbose_name = "PeeringDB"
|
||||||
|
|
||||||
|
def ready(self):
|
||||||
|
import peeringdb_server.signals
|
153
peeringdb_server/autocomplete_views.py
Normal file
153
peeringdb_server/autocomplete_views.py
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
from django.db.models import Q
|
||||||
|
from django import http
|
||||||
|
from django.utils import html
|
||||||
|
from dal import autocomplete
|
||||||
|
from peeringdb_server.models import (InternetExchange, Facility,
|
||||||
|
NetworkFacility, InternetExchangeFacility,
|
||||||
|
Organization, IXLan, CommandLineTool)
|
||||||
|
|
||||||
|
from peeringdb_server.admin_commandline_tools import TOOL_MAP
|
||||||
|
|
||||||
|
|
||||||
|
class AutocompleteHTMLResponse(autocomplete.Select2QuerySetView):
|
||||||
|
def has_add_permissions(self, request):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def render_to_response(self, context):
|
||||||
|
q = self.request.GET.get('q', None)
|
||||||
|
return http.HttpResponse("".join(
|
||||||
|
[i.get("text") for i in self.get_results(context)]),
|
||||||
|
content_type="text/html")
|
||||||
|
|
||||||
|
|
||||||
|
class ExchangeAutocompleteJSON(autocomplete.Select2QuerySetView):
|
||||||
|
def get_queryset(self):
|
||||||
|
qs = InternetExchange.objects.filter(status="ok")
|
||||||
|
if self.q:
|
||||||
|
qs = qs.filter(name__icontains=self.q)
|
||||||
|
qs = qs.order_by('name')
|
||||||
|
return qs
|
||||||
|
|
||||||
|
|
||||||
|
class ExchangeAutocomplete(AutocompleteHTMLResponse):
|
||||||
|
def get_queryset(self):
|
||||||
|
qs = InternetExchange.objects.filter(status="ok")
|
||||||
|
if self.q:
|
||||||
|
qs = qs.filter(name__icontains=self.q)
|
||||||
|
qs = qs.order_by('name')
|
||||||
|
return qs
|
||||||
|
|
||||||
|
def get_result_label(self, item):
|
||||||
|
return u'<span data-value="%d"><div class="main">%s</div></span>' % (
|
||||||
|
item.pk, html.escape(item.name))
|
||||||
|
|
||||||
|
|
||||||
|
class FacilityAutocompleteJSON(autocomplete.Select2QuerySetView):
|
||||||
|
def get_queryset(self):
|
||||||
|
qs = Facility.objects.filter(status="ok")
|
||||||
|
if self.q:
|
||||||
|
qs = qs.filter(name__icontains=self.q)
|
||||||
|
qs = qs.order_by('name')
|
||||||
|
return qs
|
||||||
|
|
||||||
|
|
||||||
|
class FacilityAutocomplete(AutocompleteHTMLResponse):
|
||||||
|
def get_queryset(self):
|
||||||
|
qs = Facility.objects.filter(status="ok")
|
||||||
|
if self.q:
|
||||||
|
qs = qs.filter(
|
||||||
|
Q(name__icontains=self.q) | Q(address1__icontains=self.q))
|
||||||
|
qs = qs.order_by('name')
|
||||||
|
return qs
|
||||||
|
|
||||||
|
def get_result_label(self, item):
|
||||||
|
return u'<span data-value="%d"><div class="main">%s</div> <div class="sub">%s</div></span>' % (
|
||||||
|
item.pk, html.escape(item.name), html.escape(item.address1))
|
||||||
|
|
||||||
|
|
||||||
|
class FacilityAutocompleteForNetwork(FacilityAutocomplete):
|
||||||
|
def get_queryset(self):
|
||||||
|
qs = super(FacilityAutocompleteForNetwork, self).get_queryset()
|
||||||
|
net_id = self.request.resolver_match.kwargs.get("net_id")
|
||||||
|
fac_ids = [
|
||||||
|
nf.facility_id
|
||||||
|
for nf in NetworkFacility.objects.filter(status="ok",
|
||||||
|
network_id=net_id)
|
||||||
|
]
|
||||||
|
qs = qs.exclude(id__in=fac_ids)
|
||||||
|
return qs
|
||||||
|
|
||||||
|
|
||||||
|
class FacilityAutocompleteForExchange(FacilityAutocomplete):
|
||||||
|
def get_queryset(self):
|
||||||
|
qs = super(FacilityAutocompleteForExchange, self).get_queryset()
|
||||||
|
ix_id = self.request.resolver_match.kwargs.get("ix_id")
|
||||||
|
fac_ids = [
|
||||||
|
nf.facility_id
|
||||||
|
for nf in InternetExchangeFacility.objects.filter(
|
||||||
|
status="ok", ix_id=ix_id)
|
||||||
|
]
|
||||||
|
qs = qs.exclude(id__in=fac_ids)
|
||||||
|
return qs
|
||||||
|
|
||||||
|
|
||||||
|
class OrganizationAutocomplete(AutocompleteHTMLResponse):
|
||||||
|
def get_queryset(self):
|
||||||
|
qs = Organization.objects.filter(status="ok")
|
||||||
|
if self.q:
|
||||||
|
qs = qs.filter(name__icontains=self.q)
|
||||||
|
qs = qs.order_by('name')
|
||||||
|
return qs
|
||||||
|
|
||||||
|
def get_result_label(self, item):
|
||||||
|
return u'<span data-value="%d"><div class="main">%s</div></span>' % (
|
||||||
|
item.pk, html.escape(item.name))
|
||||||
|
|
||||||
|
|
||||||
|
class IXLanAutocomplete(AutocompleteHTMLResponse):
|
||||||
|
def get_queryset(self):
|
||||||
|
qs = IXLan.objects.filter(status="ok").select_related("ix")
|
||||||
|
if self.q:
|
||||||
|
qs = qs.filter(
|
||||||
|
Q(ix__name__icontains=self.q)
|
||||||
|
| Q(ix__name_long__icontains=self.q))
|
||||||
|
qs = qs.order_by('ix__name')
|
||||||
|
return qs
|
||||||
|
|
||||||
|
def get_result_label(self, item):
|
||||||
|
return u'<span data-value="%d"><div class="main">%s <div class="tiny suffix">%s</div></div> <div class="sub">%s</div> <div class="sub">%s</div></span>' % (
|
||||||
|
item.pk, html.escape(item.ix.name),
|
||||||
|
html.escape(item.ix.country.code), html.escape(item.ix.name_long),
|
||||||
|
html.escape(item.name))
|
||||||
|
|
||||||
|
|
||||||
|
class CommandLineToolHistoryAutocomplete(autocomplete.Select2QuerySetView):
|
||||||
|
"""
|
||||||
|
Autocomplete for command line tools that were ran via the admin ui
|
||||||
|
"""
|
||||||
|
tool = ""
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
# Only staff needs to be able to see these
|
||||||
|
if not self.request.user.is_staff:
|
||||||
|
return []
|
||||||
|
qs = CommandLineTool.objects.filter(
|
||||||
|
tool=self.tool).order_by("-created")
|
||||||
|
if self.q:
|
||||||
|
qs = qs.filter(description__icontains=self.q)
|
||||||
|
return qs
|
||||||
|
|
||||||
|
def get_result_label(self, item):
|
||||||
|
return (item.description or self.tool)
|
||||||
|
|
||||||
|
|
||||||
|
clt_history = {}
|
||||||
|
# class for each command line tool wrapper that we will map to an auto-complete
|
||||||
|
# url in urls.py
|
||||||
|
for tool_id, tool in TOOL_MAP.items():
|
||||||
|
|
||||||
|
class ToolHistory(CommandLineToolHistoryAutocomplete):
|
||||||
|
tool = tool_id
|
||||||
|
|
||||||
|
ToolHistory.__name__ = "CLT_{}_Autocomplete".format(tool_id)
|
||||||
|
clt_history[tool_id] = ToolHistory
|
158
peeringdb_server/data_views.py
Normal file
158
peeringdb_server/data_views.py
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
"""
|
||||||
|
This holds JSON views for various data sets,
|
||||||
|
|
||||||
|
Mostly these are needed for filling form-selects for editable
|
||||||
|
mode
|
||||||
|
"""
|
||||||
|
from django.contrib.auth.decorators import login_required
|
||||||
|
from django.http import JsonResponse
|
||||||
|
import django_countries
|
||||||
|
import models
|
||||||
|
import django_peeringdb.const as const
|
||||||
|
from peeringdb_server.models import Organization, Network
|
||||||
|
|
||||||
|
from django.utils import translation
|
||||||
|
from django.utils.translation import ugettext_lazy as _
|
||||||
|
|
||||||
|
#def _(x):
|
||||||
|
# return x
|
||||||
|
|
||||||
|
# until django-peeringdb is updated we want to remove
|
||||||
|
# the 100+ Gbps choice since it's redundant
|
||||||
|
const.TRAFFIC = [(k, i) for k, i in const.TRAFFIC if k != "100+ Gbps"]
|
||||||
|
|
||||||
|
# create enums without duplicate "Not Disclosed" choices
|
||||||
|
const.RATIOS_TRUNC = const.RATIOS[1:]
|
||||||
|
const.SCOPES_TRUNC = const.SCOPES[1:]
|
||||||
|
const.NET_TYPES_TRUNC = const.NET_TYPES[1:]
|
||||||
|
|
||||||
|
# create enums without duplicate "Not Disclosed" choices
|
||||||
|
# but with the one Not Disclosed choice combining both
|
||||||
|
# values in a comma separated fashion - user for
|
||||||
|
# advanced search
|
||||||
|
const.RATIOS_ADVS = list(const.RATIOS[1:])
|
||||||
|
const.RATIOS_ADVS[0] = (",%s" % const.RATIOS_ADVS[0][0],
|
||||||
|
const.RATIOS_ADVS[0][1])
|
||||||
|
const.SCOPES_ADVS = list(const.SCOPES[1:])
|
||||||
|
const.SCOPES_ADVS[0] = (",%s" % const.SCOPES_ADVS[0][0],
|
||||||
|
const.SCOPES_ADVS[0][1])
|
||||||
|
const.NET_TYPES_ADVS = list(const.NET_TYPES[1:])
|
||||||
|
const.NET_TYPES_ADVS[0] = (",%s" % const.NET_TYPES_ADVS[0][0],
|
||||||
|
const.NET_TYPES_ADVS[0][1])
|
||||||
|
|
||||||
|
const.ORG_GROUPS = (("member", "member"), ("admin", "admin"))
|
||||||
|
|
||||||
|
const.POC_ROLES = sorted(const.POC_ROLES, key=lambda x: x[1])
|
||||||
|
|
||||||
|
BOOL_CHOICE = ((False, _("No")), (True, _("Yes")))
|
||||||
|
const.BOOL_CHOICE_STR = (("False", _("No")), ("True", _("Yes")))
|
||||||
|
|
||||||
|
|
||||||
|
def countries_w_blank(request):
|
||||||
|
"""
|
||||||
|
Returns all valid countries and their country codes with a blank field
|
||||||
|
"""
|
||||||
|
|
||||||
|
return JsonResponse({
|
||||||
|
"countries_b": [{
|
||||||
|
"id": "",
|
||||||
|
"name": ""
|
||||||
|
}] + [{
|
||||||
|
"id": unicode(code),
|
||||||
|
"name": unicode(name)
|
||||||
|
} for code, name in list(django_countries.countries)]
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
def countries(request):
|
||||||
|
"""
|
||||||
|
Returns all valid countries and their country codes
|
||||||
|
"""
|
||||||
|
|
||||||
|
return JsonResponse({
|
||||||
|
"countries": [{
|
||||||
|
"id": unicode(code),
|
||||||
|
"name": unicode(name)
|
||||||
|
} for code, name in list(django_countries.countries)]
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@login_required
|
||||||
|
def facilities(request):
|
||||||
|
"""
|
||||||
|
Returns all valid facilities with id and name
|
||||||
|
"""
|
||||||
|
|
||||||
|
return JsonResponse({
|
||||||
|
"facilities": [{
|
||||||
|
"id": fac.id,
|
||||||
|
"name": unicode(fac.name)
|
||||||
|
} for fac in models.Facility.handleref.all().undeleted()
|
||||||
|
.order_by("name")]
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
def enum(request, name):
|
||||||
|
|
||||||
|
if name.upper() not in [
|
||||||
|
"RATIOS", "RATIOS_TRUNC", "RATIOS_ADVS", "TRAFFIC", "SCOPES",
|
||||||
|
"SCOPES_TRUNC", "SCOPES_ADVS", "NET_TYPES", "NET_TYPES_TRUNC",
|
||||||
|
"NET_TYPES_ADVS", "POLICY_GENERAL", "POLICY_LOCATIONS",
|
||||||
|
"POLICY_CONTRACTS", "REGIONS", "POC_ROLES", "MEDIA", "PROTOCOLS",
|
||||||
|
"ORG_GROUPS", "BOOL_CHOICE_STR", "VISIBILITY"
|
||||||
|
]:
|
||||||
|
raise Exception("Unknown enum")
|
||||||
|
|
||||||
|
return JsonResponse({
|
||||||
|
"enum/%s" % name: [{
|
||||||
|
"id": id,
|
||||||
|
"name": _(n)
|
||||||
|
} for id, n in getattr(const, name.upper())]
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
def asns(request):
|
||||||
|
"""
|
||||||
|
Returns a JSON response with a list of asns that the user's
|
||||||
|
organizations own, to use for selecting asn in netixlan
|
||||||
|
creation
|
||||||
|
"""
|
||||||
|
rv = []
|
||||||
|
try:
|
||||||
|
net = Network.objects.get(id=request.GET.get("id"))
|
||||||
|
org = net.org
|
||||||
|
except Network.DoesNotExist:
|
||||||
|
return JsonResponse({"asns": []})
|
||||||
|
|
||||||
|
for net in org.net_set_active.order_by("asn"):
|
||||||
|
rv.append({"id": net.asn, "name": net.asn})
|
||||||
|
return JsonResponse({"asns": rv})
|
||||||
|
|
||||||
|
|
||||||
|
def organizations(request):
|
||||||
|
"""
|
||||||
|
Returns a JSON response with a list of organization names and ids
|
||||||
|
This is currently only used by the org-merge-tool which is only
|
||||||
|
available to site administrators.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not request.user.is_superuser:
|
||||||
|
return JsonResponse({}, status=403)
|
||||||
|
|
||||||
|
return JsonResponse({
|
||||||
|
"organizations": [{
|
||||||
|
"id": o.id,
|
||||||
|
"name": o.name
|
||||||
|
} for o in Organization.objects.filter(status="ok").order_by("name")]
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
def languages(request):
|
||||||
|
from django.conf import settings
|
||||||
|
cur_language = translation.get_language()
|
||||||
|
return JsonResponse({
|
||||||
|
"locales": [{
|
||||||
|
"id": id,
|
||||||
|
"name": _(name)
|
||||||
|
} for (id, name) in settings.LANGUAGES]
|
||||||
|
})
|
22
peeringdb_server/db_router.py
Normal file
22
peeringdb_server/db_router.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
class DatabaseRouter(object):
|
||||||
|
"""
|
||||||
|
A very basic databases router that routes to a different
|
||||||
|
read and write db
|
||||||
|
"""
|
||||||
|
|
||||||
|
def db_for_read(self, model, **hints):
|
||||||
|
return "read"
|
||||||
|
|
||||||
|
def db_for_write(self, model, **hints):
|
||||||
|
return "default"
|
||||||
|
|
||||||
|
def allow_relation(self, obj1, obj2, **hints):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def allow_migrate(self, db, app_label, model_name=None, **hints):
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class TestRouter(DatabaseRouter):
|
||||||
|
def db_for_read(self, model, **hints):
|
||||||
|
return "default"
|
184
peeringdb_server/deskpro.py
Normal file
184
peeringdb_server/deskpro.py
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
"""
|
||||||
|
DeskPro API Client
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from django.template import loader
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
from peeringdb_server.models import DeskProTicket
|
||||||
|
from peeringdb_server.inet import RdapNotFoundError
|
||||||
|
|
||||||
|
|
||||||
|
def ticket_queue(subject, body, user):
|
||||||
|
""" queue a deskpro ticket for creation """
|
||||||
|
|
||||||
|
ticket = DeskProTicket.objects.create(subject=u"{}{}".format(
|
||||||
|
settings.EMAIL_SUBJECT_PREFIX, subject), body=body, user=user)
|
||||||
|
|
||||||
|
|
||||||
|
class APIError(IOError):
|
||||||
|
def __init__(self, msg, data):
|
||||||
|
super(APIError, self).__init__(msg)
|
||||||
|
self.data = data
|
||||||
|
|
||||||
|
|
||||||
|
def ticket_queue_asnauto_skipvq(user, org, net, rir_data):
|
||||||
|
"""
|
||||||
|
queue deskro ticket creation for asn automation action: skip vq
|
||||||
|
"""
|
||||||
|
|
||||||
|
if isinstance(net, dict):
|
||||||
|
net_name = net.get("name")
|
||||||
|
else:
|
||||||
|
net_name = net.name
|
||||||
|
|
||||||
|
if isinstance(org, dict):
|
||||||
|
org_name = org.get("name")
|
||||||
|
else:
|
||||||
|
org_name = org.name
|
||||||
|
|
||||||
|
ticket_queue("[ASNAUTO] Network '%s' approved for existing Org '%s'" %
|
||||||
|
(net_name, org_name),
|
||||||
|
loader.get_template(
|
||||||
|
'email/notify-pdb-admin-asnauto-skipvq.txt').render({
|
||||||
|
"user": user,
|
||||||
|
"org": org,
|
||||||
|
"net": net,
|
||||||
|
"rir_data": rir_data
|
||||||
|
}), user)
|
||||||
|
|
||||||
|
|
||||||
|
def ticket_queue_asnauto_affil(user, org, net, rir_data):
|
||||||
|
"""
|
||||||
|
queue deskro ticket creation for asn automation action: affil
|
||||||
|
"""
|
||||||
|
|
||||||
|
ticket_queue(
|
||||||
|
"[ASNAUTO] Ownership claim granted to Org '%s' for user '%s'" %
|
||||||
|
(org.name, user.username),
|
||||||
|
loader.get_template('email/notify-pdb-admin-asnauto-affil.txt').render(
|
||||||
|
{
|
||||||
|
"user": user,
|
||||||
|
"org": org,
|
||||||
|
"net": net,
|
||||||
|
"rir_data": rir_data
|
||||||
|
}), user)
|
||||||
|
|
||||||
|
|
||||||
|
def ticket_queue_asnauto_create(user, org, net, rir_data, asn,
|
||||||
|
org_created=False, net_created=False):
|
||||||
|
"""
|
||||||
|
queue deskro ticket creation for asn automation action: create
|
||||||
|
"""
|
||||||
|
|
||||||
|
subject = []
|
||||||
|
|
||||||
|
if org_created:
|
||||||
|
subject.append("Organization '%s'" % org.name)
|
||||||
|
if net_created:
|
||||||
|
subject.append("Network '%s'" % net.name)
|
||||||
|
|
||||||
|
if not subject:
|
||||||
|
return
|
||||||
|
subject = ", ".join(subject)
|
||||||
|
|
||||||
|
ticket_queue(
|
||||||
|
"[ASNAUTO] %s created" % subject,
|
||||||
|
loader.get_template(
|
||||||
|
'email/notify-pdb-admin-asnauto-entity-creation.txt').render({
|
||||||
|
"user": user,
|
||||||
|
"org": org,
|
||||||
|
"net": net,
|
||||||
|
"asn": asn,
|
||||||
|
"org_created": org_created,
|
||||||
|
"net_created": net_created,
|
||||||
|
"rir_data": rir_data
|
||||||
|
}), user)
|
||||||
|
|
||||||
|
|
||||||
|
def ticket_queue_rdap_error(user, asn, error):
|
||||||
|
if isinstance(error, RdapNotFoundError):
|
||||||
|
return
|
||||||
|
error_message = "{}".format(error)
|
||||||
|
|
||||||
|
if re.match("(.+) returned 400", error_message):
|
||||||
|
return
|
||||||
|
|
||||||
|
subject = "[RDAP_ERR] {} - AS{}".format(user.username, asn)
|
||||||
|
ticket_queue(
|
||||||
|
subject,
|
||||||
|
loader.get_template('email/notify-pdb-admin-rdap-error.txt').render({
|
||||||
|
"user": user,
|
||||||
|
"asn": asn,
|
||||||
|
"error_details": error_message
|
||||||
|
}), user)
|
||||||
|
|
||||||
|
|
||||||
|
class APIClient(object):
|
||||||
|
def __init__(self, url, key):
|
||||||
|
self.key = key
|
||||||
|
self.url = url
|
||||||
|
|
||||||
|
@property
|
||||||
|
def auth_headers(self):
|
||||||
|
return {"Authorization": "key {}".format(self.key)}
|
||||||
|
|
||||||
|
def parse_response(self, response, many=False):
|
||||||
|
r_json = response.json()
|
||||||
|
if "status" in r_json:
|
||||||
|
if r_json["status"] >= 400:
|
||||||
|
raise APIError(r_json["message"], r_json)
|
||||||
|
else:
|
||||||
|
response.raise_for_status()
|
||||||
|
data = r_json["data"]
|
||||||
|
if isinstance(data, list):
|
||||||
|
if many:
|
||||||
|
return r_json["data"]
|
||||||
|
elif data:
|
||||||
|
return data[0]
|
||||||
|
else:
|
||||||
|
return data
|
||||||
|
|
||||||
|
def get(self, endpoint, param):
|
||||||
|
response = requests.get("{}/{}".format(self.url, endpoint),
|
||||||
|
params=param, headers=self.auth_headers)
|
||||||
|
return self.parse_response(response)
|
||||||
|
|
||||||
|
def create(self, endpoint, param):
|
||||||
|
response = requests.post("{}/{}".format(self.url, endpoint),
|
||||||
|
json=param, headers=self.auth_headers)
|
||||||
|
return self.parse_response(response)
|
||||||
|
|
||||||
|
def require_person(self, user):
|
||||||
|
person = self.get("people", {"primary_email": user.email})
|
||||||
|
if not person:
|
||||||
|
person = self.create(
|
||||||
|
"people", {
|
||||||
|
"primary_email": user.email,
|
||||||
|
"first_name": user.first_name,
|
||||||
|
"last_name": user.last_name,
|
||||||
|
"name": user.full_name
|
||||||
|
})
|
||||||
|
|
||||||
|
return person
|
||||||
|
|
||||||
|
def create_ticket(self, ticket):
|
||||||
|
person = self.require_person(ticket.user)
|
||||||
|
ticket_response = self.create(
|
||||||
|
"tickets", {
|
||||||
|
"subject": ticket.subject,
|
||||||
|
"person": {
|
||||||
|
"id": person["id"]
|
||||||
|
},
|
||||||
|
"status": "awaiting_agent"
|
||||||
|
})
|
||||||
|
|
||||||
|
self.create(
|
||||||
|
"tickets/{}/messages".format(ticket_response["id"]), {
|
||||||
|
"message": ticket.body.replace("\n", "<br />\n"),
|
||||||
|
"person": person["id"],
|
||||||
|
"format": "html"
|
||||||
|
})
|
366
peeringdb_server/export_views.py
Normal file
366
peeringdb_server/export_views.py
Normal file
@@ -0,0 +1,366 @@
|
|||||||
|
import json
|
||||||
|
import datetime
|
||||||
|
import urllib
|
||||||
|
import csv
|
||||||
|
import StringIO
|
||||||
|
import collections
|
||||||
|
|
||||||
|
from django.http import JsonResponse, HttpResponse
|
||||||
|
from django.views import View
|
||||||
|
from django.utils.translation import ugettext_lazy as _
|
||||||
|
|
||||||
|
from rest_framework.test import APIRequestFactory
|
||||||
|
from peeringdb_server.models import (IXLan, NetworkIXLan, InternetExchange)
|
||||||
|
from peeringdb_server.rest import (
|
||||||
|
REFTAG_MAP as RestViewSets, )
|
||||||
|
from peeringdb_server.renderers import JSONEncoder
|
||||||
|
|
||||||
|
|
||||||
|
def export_ixf_ix_members(ixlans, pretty=False):
|
||||||
|
member_list = []
|
||||||
|
ixp_list = []
|
||||||
|
|
||||||
|
for ixlan in ixlans:
|
||||||
|
if ixlan.ix not in ixp_list:
|
||||||
|
ixp_list.append(ixlan.ix)
|
||||||
|
|
||||||
|
rv = {
|
||||||
|
"version": "0.6",
|
||||||
|
"timestamp": datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||||
|
"member_list": member_list,
|
||||||
|
"ixp_list": [{
|
||||||
|
"ixp_id": ixp.id,
|
||||||
|
"shortname": ixp.name
|
||||||
|
} for ixp in ixp_list]
|
||||||
|
}
|
||||||
|
|
||||||
|
for ixlan in ixlans:
|
||||||
|
asns = []
|
||||||
|
for netixlan in ixlan.netixlan_set_active.all():
|
||||||
|
if netixlan.asn in asns:
|
||||||
|
continue
|
||||||
|
connection_list = []
|
||||||
|
member = {
|
||||||
|
"asnum": netixlan.asn,
|
||||||
|
"member_type": "peering",
|
||||||
|
"name": netixlan.network.name,
|
||||||
|
"url": netixlan.network.website,
|
||||||
|
"contact_email": [
|
||||||
|
poc.email
|
||||||
|
for poc in netixlan.network.poc_set_active.filter(
|
||||||
|
visible="Public")
|
||||||
|
],
|
||||||
|
"contact_phone": [
|
||||||
|
poc.phone
|
||||||
|
for poc in netixlan.network.poc_set_active.filter(
|
||||||
|
visible="Public")
|
||||||
|
],
|
||||||
|
"peering_policy": netixlan.network.policy_general.lower(),
|
||||||
|
"peering_policy_url": netixlan.network.policy_url,
|
||||||
|
"connection_list": connection_list
|
||||||
|
}
|
||||||
|
member_list.append(member)
|
||||||
|
asns.append(netixlan.asn)
|
||||||
|
for _netixlan in ixlan.netixlan_set_active.filter(
|
||||||
|
asn=netixlan.asn):
|
||||||
|
vlan_list = [{}]
|
||||||
|
connection = {
|
||||||
|
"ixp_id": _netixlan.ixlan.ix_id,
|
||||||
|
"state": "active",
|
||||||
|
"if_list": [{
|
||||||
|
"if_speed": _netixlan.speed
|
||||||
|
}],
|
||||||
|
"vlan_list": vlan_list
|
||||||
|
}
|
||||||
|
connection_list.append(connection)
|
||||||
|
|
||||||
|
if netixlan.ipaddr4:
|
||||||
|
vlan_list[0]["ipv4"] = {
|
||||||
|
"address": "{}".format(netixlan.ipaddr4),
|
||||||
|
"routeserver": netixlan.is_rs_peer,
|
||||||
|
"max_prefix": netixlan.network.info_prefixes4,
|
||||||
|
"as_macro": netixlan.network.irr_as_set
|
||||||
|
}
|
||||||
|
if netixlan.ipaddr6:
|
||||||
|
vlan_list[0]["ipv6"] = {
|
||||||
|
"address": "{}".format(netixlan.ipaddr6),
|
||||||
|
"routeserver": netixlan.is_rs_peer,
|
||||||
|
"max_prefix": netixlan.network.info_prefixes6,
|
||||||
|
"as_macro": netixlan.network.irr_as_set
|
||||||
|
}
|
||||||
|
|
||||||
|
if pretty:
|
||||||
|
return json.dumps(rv, indent=2)
|
||||||
|
else:
|
||||||
|
return json.dumps(rv)
|
||||||
|
|
||||||
|
|
||||||
|
def view_export_ixf_ix_members(request, ix_id):
|
||||||
|
return HttpResponse(
|
||||||
|
export_ixf_ix_members(
|
||||||
|
IXLan.objects.filter(ix_id=ix_id, status="ok"),
|
||||||
|
pretty=request.GET.has_key("pretty")),
|
||||||
|
content_type="application/json")
|
||||||
|
|
||||||
|
|
||||||
|
def view_export_ixf_ixlan_members(request, ixlan_id):
|
||||||
|
return HttpResponse(
|
||||||
|
export_ixf_ix_members(
|
||||||
|
IXLan.objects.filter(id=ixlan_id, status="ok"),
|
||||||
|
pretty=request.GET.has_key("pretty")),
|
||||||
|
content_type="application/json")
|
||||||
|
|
||||||
|
|
||||||
|
class ExportView(View):
|
||||||
|
"""
|
||||||
|
Base class for more complex data exports
|
||||||
|
"""
|
||||||
|
|
||||||
|
# supported export fortmats
|
||||||
|
formats = ["json", "json_pretty", "csv"]
|
||||||
|
|
||||||
|
# when exporting json data, if this is it not None
|
||||||
|
# json data will be wrapped in one additional dict
|
||||||
|
# and referenced at a key with the specified name
|
||||||
|
json_root_key = "data"
|
||||||
|
|
||||||
|
# exporting data should send file attachment headers
|
||||||
|
download = True
|
||||||
|
|
||||||
|
# if download=True this value will be used to specify
|
||||||
|
# the filename of the downloaded file
|
||||||
|
download_name = "export.{extension}"
|
||||||
|
|
||||||
|
# format to file extension translation table
|
||||||
|
extensions = {"csv": "csv", "json": "json", "json_pretty": "json"}
|
||||||
|
|
||||||
|
def get(self, request, fmt):
|
||||||
|
fmt = fmt.replace("-", "_")
|
||||||
|
if fmt not in self.formats:
|
||||||
|
raise ValueError(_("Invalid export format"))
|
||||||
|
try:
|
||||||
|
response_handler = getattr(self, "response_{}".format(fmt))
|
||||||
|
response = response_handler(self.generate(request))
|
||||||
|
|
||||||
|
if self.download == True:
|
||||||
|
# send attachment header, triggering download on the client side
|
||||||
|
filename = self.download_name.format(
|
||||||
|
extension=self.extensions.get(fmt))
|
||||||
|
response[
|
||||||
|
'Content-Disposition'] = 'attachment; filename="{}"'.format(
|
||||||
|
filename)
|
||||||
|
return response
|
||||||
|
|
||||||
|
except Exception as exc:
|
||||||
|
return JsonResponse({"non_field_errors": [str(exc)]}, status=400)
|
||||||
|
|
||||||
|
def generate(self, request):
|
||||||
|
"""
|
||||||
|
Function that generates export data from request
|
||||||
|
|
||||||
|
Override this
|
||||||
|
"""
|
||||||
|
return {}
|
||||||
|
|
||||||
|
def response_json(self, data):
|
||||||
|
"""
|
||||||
|
Return Response object for normal json response
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
- data <list|dict>: serializable data, if list is passed you will need
|
||||||
|
to specify a value in self.json_root_key
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
- JsonResponse
|
||||||
|
"""
|
||||||
|
if self.json_root_key:
|
||||||
|
data = {self.json_root_key: data}
|
||||||
|
return JsonResponse(data, encoder=JSONEncoder)
|
||||||
|
|
||||||
|
def response_json_pretty(self, data):
|
||||||
|
"""
|
||||||
|
Returns Response object for pretty (indented) json response
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
- data <list|dict>: serializable data, if list is passed tou will need
|
||||||
|
to specify a value in self.json_root_key
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
- HttpResponse: http response with appropriate json headers, cannot use
|
||||||
|
JsonResponse here because we need to specify indent level
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.json_root_key:
|
||||||
|
data = {self.json_root_key: data}
|
||||||
|
return HttpResponse(
|
||||||
|
json.dumps(data, indent=2, cls=JSONEncoder),
|
||||||
|
content_type="application/json")
|
||||||
|
|
||||||
|
def response_csv(self, data):
|
||||||
|
"""
|
||||||
|
Returns Response object for CSV response
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
- data <list>
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
- HttpResponse
|
||||||
|
"""
|
||||||
|
if not data:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
response = HttpResponse(content_type="text/csv")
|
||||||
|
csv_writer = csv.DictWriter(response, fieldnames=data[0].keys())
|
||||||
|
|
||||||
|
csv_writer.writeheader()
|
||||||
|
|
||||||
|
for row in data:
|
||||||
|
for k, v in row.items():
|
||||||
|
if isinstance(v, unicode):
|
||||||
|
row[k] = v.encode("utf-8")
|
||||||
|
csv_writer.writerow(row)
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
class AdvancedSearchExportView(ExportView):
|
||||||
|
"""
|
||||||
|
Allows exporting of advanced search result data
|
||||||
|
"""
|
||||||
|
|
||||||
|
tag = None
|
||||||
|
json_root_key = "results"
|
||||||
|
download_name = "advanced_search_export.{extension}"
|
||||||
|
|
||||||
|
def fetch(self, request):
|
||||||
|
"""
|
||||||
|
Fetch data from api according to GET parameters
|
||||||
|
|
||||||
|
Note that `limit` and `depth` will be overwritten, other api
|
||||||
|
parameters will be passed along as-is
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
- dict: un-rendered dataset returned by api
|
||||||
|
"""
|
||||||
|
params = request.GET.dict()
|
||||||
|
params["limit"] = 250
|
||||||
|
params["depth"] = 1
|
||||||
|
|
||||||
|
# prepare api request
|
||||||
|
request_factory = APIRequestFactory()
|
||||||
|
viewset = RestViewSets[self.tag].as_view({"get": "list"})
|
||||||
|
|
||||||
|
api_request = request_factory.get("/api/{}/?{}".format(
|
||||||
|
self.tag, urllib.urlencode(params)))
|
||||||
|
|
||||||
|
# we want to use the same user as the original request
|
||||||
|
# so permissions are applied correctly
|
||||||
|
api_request.user = request.user
|
||||||
|
|
||||||
|
response = viewset(api_request)
|
||||||
|
|
||||||
|
return response.data
|
||||||
|
|
||||||
|
def get(self, request, tag, fmt):
|
||||||
|
"""
|
||||||
|
Handle export
|
||||||
|
"""
|
||||||
|
self.tag = tag
|
||||||
|
return super(AdvancedSearchExportView, self).get(request, fmt)
|
||||||
|
|
||||||
|
def generate(self, request):
|
||||||
|
"""
|
||||||
|
Generate data for the reftag specified in self.tag
|
||||||
|
|
||||||
|
This functions will call generate_<tag> and return the result
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
- request <Request>
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
- list: list containing rendered data rows ready for export
|
||||||
|
"""
|
||||||
|
if self.tag not in ["net", "ix", "fac"]:
|
||||||
|
raise ValueError(_("Invalid tag"))
|
||||||
|
data_function = getattr(self, "generate_{}".format(self.tag))
|
||||||
|
return data_function(request)
|
||||||
|
|
||||||
|
def generate_net(self, request):
|
||||||
|
"""
|
||||||
|
Fetch network data from the api according to request and then render
|
||||||
|
it ready for export
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
- request <Request>
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
- list: list containing rendered data ready for export
|
||||||
|
"""
|
||||||
|
|
||||||
|
data = self.fetch(request)
|
||||||
|
download_data = []
|
||||||
|
for row in data:
|
||||||
|
download_data.append(
|
||||||
|
collections.OrderedDict([
|
||||||
|
("Name", row["name"]),
|
||||||
|
("Also known as", row["aka"]),
|
||||||
|
("ASN", row["asn"]),
|
||||||
|
("General Policy", row["policy_general"]),
|
||||||
|
("Network Type", row["info_type"]),
|
||||||
|
("Network Scope", row["info_scope"]),
|
||||||
|
("Traffic Levels", row["info_traffic"]),
|
||||||
|
("Traffic Ratio", row["info_ratio"]),
|
||||||
|
("Exchanges", len(row["netixlan_set"])),
|
||||||
|
("Facilities", len(row["netfac_set"])),
|
||||||
|
]))
|
||||||
|
return download_data
|
||||||
|
|
||||||
|
def generate_fac(self, request):
|
||||||
|
"""
|
||||||
|
Fetch facility data from the api according to request and then render
|
||||||
|
it ready for export
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
- request <Request>
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
- list: list containing rendered data ready for export
|
||||||
|
"""
|
||||||
|
|
||||||
|
data = self.fetch(request)
|
||||||
|
download_data = []
|
||||||
|
for row in data:
|
||||||
|
download_data.append(
|
||||||
|
collections.OrderedDict(
|
||||||
|
[("Name", row["name"]), ("Management", row["org_name"]),
|
||||||
|
("CLLI", row["clli"]), ("NPA-NXX", row["npanxx"]),
|
||||||
|
("City", row["city"]), ("Country", row["country"]),
|
||||||
|
("State",
|
||||||
|
row["state"]), ("Postal Code",
|
||||||
|
row["zipcode"]), ("Networks",
|
||||||
|
row["net_count"])]))
|
||||||
|
return download_data
|
||||||
|
|
||||||
|
def generate_ix(self, request):
|
||||||
|
"""
|
||||||
|
Fetch exchange data from the api according to request and then render
|
||||||
|
it ready for export
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
- request <Request>
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
- list: list containing rendered data ready for export
|
||||||
|
"""
|
||||||
|
|
||||||
|
data = self.fetch(request)
|
||||||
|
download_data = []
|
||||||
|
for row in data:
|
||||||
|
download_data.append(
|
||||||
|
collections.OrderedDict([
|
||||||
|
("Name", row["name"]),
|
||||||
|
("Media Type", row["media"]),
|
||||||
|
("Country", row["country"]),
|
||||||
|
("City", row["city"]),
|
||||||
|
]))
|
||||||
|
return download_data
|
115
peeringdb_server/forms.py
Normal file
115
peeringdb_server/forms.py
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
import re
|
||||||
|
from peeringdb_server.models import User, Organization
|
||||||
|
from django.contrib.auth import forms as auth_forms
|
||||||
|
from django import forms
|
||||||
|
from django_namespace_perms.constants import *
|
||||||
|
from django.utils.translation import ugettext_lazy as _
|
||||||
|
|
||||||
|
|
||||||
|
class OrgAdminUserPermissionForm(forms.Form):
|
||||||
|
|
||||||
|
entity = forms.CharField()
|
||||||
|
perms = forms.IntegerField()
|
||||||
|
|
||||||
|
def clean_perms(self):
|
||||||
|
perms = self.cleaned_data.get("perms")
|
||||||
|
if not perms & PERM_READ:
|
||||||
|
perms = perms | PERM_READ
|
||||||
|
if perms & PERM_DENY:
|
||||||
|
perms = perms ^ PERM_DENY
|
||||||
|
if perms > PERM_CRUD or perms < PERM_READ:
|
||||||
|
raise forms.ValidationError(_("Invalid permission level"))
|
||||||
|
return perms
|
||||||
|
|
||||||
|
|
||||||
|
class AffiliateToOrgForm(forms.Form):
|
||||||
|
|
||||||
|
asn = forms.CharField(required=False)
|
||||||
|
org = forms.CharField(required=False)
|
||||||
|
|
||||||
|
def clean_org(self):
|
||||||
|
org_id = self.cleaned_data.get("org")
|
||||||
|
if not org_id:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
# if org id can be inted, an existing org id has been submitted
|
||||||
|
# otherwise an org name has been submitted that may or may not exist
|
||||||
|
try:
|
||||||
|
org_id = int(org_id)
|
||||||
|
if not Organization.objects.filter(id=org_id).exists():
|
||||||
|
if self.cleaned_data.get("asn"):
|
||||||
|
return 0
|
||||||
|
except ValueError:
|
||||||
|
try:
|
||||||
|
org = Organization.objects.get(name=org_id)
|
||||||
|
return org.id
|
||||||
|
except Organization.DoesNotExist:
|
||||||
|
self.cleaned_data["org_name"] = org_id
|
||||||
|
return 0
|
||||||
|
|
||||||
|
return org_id
|
||||||
|
|
||||||
|
def clean_asn(self):
|
||||||
|
asn = self.cleaned_data.get("asn")
|
||||||
|
if not asn:
|
||||||
|
return 0
|
||||||
|
try:
|
||||||
|
asn = int(re.sub("\D", "", asn))
|
||||||
|
except ValueError:
|
||||||
|
raise forms.ValidationError(_("ASN needs to be a number"))
|
||||||
|
return asn
|
||||||
|
|
||||||
|
|
||||||
|
class PasswordChangeForm(forms.Form):
|
||||||
|
password = forms.CharField()
|
||||||
|
password_v = forms.CharField()
|
||||||
|
|
||||||
|
def clean_password(self):
|
||||||
|
password = self.cleaned_data.get("password")
|
||||||
|
if len(password) < 10:
|
||||||
|
raise forms.ValidationError(
|
||||||
|
_("Needs to be at least 10 characters long"))
|
||||||
|
return password
|
||||||
|
|
||||||
|
def clean_password_v(self):
|
||||||
|
password = self.cleaned_data.get("password")
|
||||||
|
password_v = self.cleaned_data.get("password_v")
|
||||||
|
|
||||||
|
if password != password_v:
|
||||||
|
raise forms.ValidationError(
|
||||||
|
_("Passwords need to match"), code="password_mismatch")
|
||||||
|
return password_v
|
||||||
|
|
||||||
|
|
||||||
|
class PasswordResetForm(forms.Form):
|
||||||
|
email = forms.EmailField()
|
||||||
|
|
||||||
|
|
||||||
|
class UsernameRetrieveForm(forms.Form):
|
||||||
|
email = forms.EmailField()
|
||||||
|
|
||||||
|
|
||||||
|
class UserCreationForm(auth_forms.UserCreationForm):
|
||||||
|
class Meta:
|
||||||
|
model = User
|
||||||
|
fields = (
|
||||||
|
"username",
|
||||||
|
"email",
|
||||||
|
"first_name",
|
||||||
|
"last_name",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class UserLocaleForm(forms.Form):
|
||||||
|
locale = forms.CharField()
|
||||||
|
|
||||||
|
def clean_locale(self):
|
||||||
|
loc = self.cleaned_data.get("locale")
|
||||||
|
# django.utils.translation.check_for_language() #lang_code
|
||||||
|
if loc:
|
||||||
|
return loc
|
||||||
|
return None
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = User
|
||||||
|
fields = ('locale')
|
95
peeringdb_server/inet.py
Normal file
95
peeringdb_server/inet.py
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
import ipaddress
|
||||||
|
import re
|
||||||
|
|
||||||
|
import rdap
|
||||||
|
from rdap import RdapAsn
|
||||||
|
from rdap.exceptions import RdapException, RdapHTTPError, RdapNotFoundError
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from peeringdb_server import settings
|
||||||
|
|
||||||
|
|
||||||
|
class RdapLookup(rdap.RdapClient):
    """
    RDAP client preconfigured from the server settings
    (bootstrap URL and LACNIC api key).
    """

    def __init__(self):
        # build the client configuration from project settings
        super(RdapLookup, self).__init__({
            "bootstrap_url": settings.RDAP_URL.rstrip('/'),
            "lacnic_apikey": settings.RDAP_LACNIC_APIKEY,
        })
|
||||||
|
|
||||||
|
|
||||||
|
def network_is_bogon(network):
    """
    Check whether the passed ipaddress network is a bogon.

    Arguments:
        - network <ipaddress.IPv4Network|ipaddress.IPv6Network>

    Return:
        - bool: True when the network is not globally routable or is reserved
    """

    # a network is usable only when it is globally routable and not reserved
    if network.is_global and not network.is_reserved:
        return False
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def network_is_pdb_valid(network):
    """
    Check whether the passed ipaddress network lies in address
    space that PeeringDB accepts.

    Arguments:
        - network <ipaddress.IPv4Network|ipaddress.IPv6Network>

    Return:
        - bool
    """

    # multicast and bogon space is never acceptable
    if network.is_multicast or network_is_bogon(network):
        return False

    # all remaining IPv4 space is acceptable
    if network.version == 4:
        return True

    # disallowed IPv6 blocks, identified by the top 16 bits:
    #   2002::/16 - RFC 3068 - 6to4 prefix
    #   3ffe::/16 - RFC 5156 - used for the 6bone but was returned
    #   fec0::/10 - RFC 4291 - Reserved by IETF
    #   ff00::/8  - RFC 4291 - Multicast
    blocked_upper16 = (0x2002, 0x3ffe, 0xfec0, 0xff00)

    return int(network.network_address) >> 112 not in blocked_upper16
|
||||||
|
|
||||||
|
|
||||||
|
def get_prefix_protocol(prefix):
    """
    Takes a network address space prefix string and returns
    a string describing the protocol.

    Will raise a ValueError if it cannot determine protocol.

    Returns:
        str: IPv4 or IPv6
    """

    # FIX: the original caught the misspelled names
    # `ipaddress.AdressValueError` / `ipaddress.AddessValueError`, which do
    # not exist, so any parse failure raised AttributeError instead; it also
    # raised the misspelled `ValueErrror`. We catch ValueError, the common
    # base of AddressValueError and NetmaskValueError.
    try:
        ipaddress.IPv4Network(prefix)
        return "IPv4"
    except ValueError:
        try:
            ipaddress.IPv6Network(prefix)
            return "IPv6"
        except ValueError:
            raise ValueError("Prefix invalid")
|
83
peeringdb_server/mail.py
Normal file
83
peeringdb_server/mail.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
from django.core.mail.message import EmailMultiAlternatives
|
||||||
|
from django.conf import settings
|
||||||
|
from django.template import loader
|
||||||
|
from django.utils.html import strip_tags
|
||||||
|
from django.utils.translation import ugettext_lazy as _, override
|
||||||
|
|
||||||
|
|
||||||
|
def mail_admins_with_from(subj, msg, from_addr, fail_silently=False,
                          connection=None, html_message=None):
    """
    Mail the site admins (settings.ADMINS) but allow specifying of the
    from address.

    Arguments:
        - subj <str>: subject, prefixed with settings.EMAIL_SUBJECT_PREFIX
        - msg <str>: message body; sent as plain text (tags stripped) with
          an html alternative attached
        - from_addr <str>: sender address
        - fail_silently <bool>: passed through to mail.send
        - connection: optional mail backend connection
        - html_message: unused, kept for call compatibility
    """

    # no admins configured, nothing to do
    if not settings.ADMINS:
        return

    # set plain text message
    # FIX: msg_raw was computed but the raw `msg` was passed as the body;
    # the stripped text is now used as the plain-text part as intended
    msg_raw = strip_tags(msg)
    mail = EmailMultiAlternatives(
        "%s%s" % (settings.EMAIL_SUBJECT_PREFIX, subj), msg_raw, from_addr,
        [a[1] for a in settings.ADMINS], connection=connection)

    # attach html message
    mail.attach_alternative(msg.replace("\n", "<br />\n"), "text/html")

    mail.send(fail_silently=fail_silently)
|
||||||
|
|
||||||
|
|
||||||
|
def mail_users_entity_merge(users_source, users_target, entity_source,
                            entity_target):
    """
    Notify the users of both sides of an entity merge.

    Users in `users_source` are told that their entity (`entity_source`)
    was merged into another entity; users in `users_target` are told that
    an entity was merged into theirs (`entity_target`). Each user is
    emailed once, in their own locale.

    Arguments:
        - users_source <list>: list of User objects
        - users_target <list>: list of User objects
        - entity_source <HandleRef>: handleref object, entity that was merged
        - entity_target <HandleRef>: handleref object, entity that was merged into
    """
    type_name = entity_source._meta.verbose_name.capitalize()

    context = {
        "entity_type_name": type_name,
        "entity_source": entity_source,
        "entity_target": entity_target,
        "entity_target_url": "{}/{}/{}".format(
            settings.BASE_URL, entity_target.ref_tag, entity_target.id),
        "support_email": settings.DEFAULT_FROM_EMAIL
    }
    msg = loader.get_template('email/notify-org-admin-merge.txt').render(
        context)

    # each user is notified exactly once, even if present on both sides
    recipients = set(users_source) | set(users_target)
    for user in recipients:
        #FIXME: why not have the `override` call in email_user in the first place?
        with override(user.locale):
            subject = _(u"{} Merge Notification: {} -> {}").format(
                type_name, entity_source.name, entity_target.name)
            user.email_user(subject, msg)
|
||||||
|
|
||||||
|
|
||||||
|
def mail_username_retrieve(email, secret):
    """
    Send an email to the specified address containing the url for
    username retrieval.

    Arguments:
        - email <str>
        - secret <str>: username retrieval secret in the user's session
    """
    retrieve_url = "{}/username-retrieve/complete?secret={}".format(
        settings.BASE_URL, secret)

    context = {
        "email": email,
        "secret": secret,
        "username_retrieve_url": retrieve_url,
    }
    body = loader.get_template('email/username-retrieve.txt').render(context)

    message = EmailMultiAlternatives(
        "PeeringDB username retrieval", body,
        settings.DEFAULT_FROM_EMAIL, [email])
    message.send(fail_silently=False)
|
0
peeringdb_server/management/__init__.py
Normal file
0
peeringdb_server/management/__init__.py
Normal file
0
peeringdb_server/management/commands/__init__.py
Normal file
0
peeringdb_server/management/commands/__init__.py
Normal file
77
peeringdb_server/management/commands/_db_command.py
Normal file
77
peeringdb_server/management/commands/_db_command.py
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
import json
|
||||||
|
import peeringdb_server.models as pdbm
|
||||||
|
from django.contrib.contenttypes.models import ContentType
|
||||||
|
from reversion.models import Version, Revision
|
||||||
|
from optparse import make_option
|
||||||
|
|
||||||
|
MODELS = [
|
||||||
|
pdbm.Organization, pdbm.Network, pdbm.InternetExchange, pdbm.Facility,
|
||||||
|
pdbm.NetworkContact, pdbm.NetworkFacility, pdbm.IXLan, pdbm.IXLanPrefix,
|
||||||
|
pdbm.NetworkIXLan
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class DBCommand(BaseCommand):
    """
    Management command base that prints the reversion history of one or
    more objects, showing the field-level diff between consecutive
    versions.
    """

    args = "<reftag> <id, id, ...>"
    help = "Inspect an object's reversion history"

    def log(self, id, msg):
        # FIX: converted py2 print statements to print() calls (valid in
        # both py2 and py3, and consistent with the other commands)
        print("%s: %s" % (id, msg))

    def print_line(self):
        # 80-column separator
        print("-" * 80)

    def handle(self, *args, **options):
        """
        Expects a handleref tag (eg 'net') followed by one or more
        object ids; prints each object's version history.
        """

        args = list(args)

        try:
            ref_tag = args.pop(0)
        except IndexError:
            print("Please specify object reftag (eg 'net') and at least one id")
            return

        if len(args) == 0:
            print("Please specify at least one id")
            return

        ids = [int(i) for i in args]

        # resolve the handleref tag to one of the supported models
        model = None
        for m in MODELS:
            if m.handleref.tag == ref_tag:
                model = m
                break

        if not model:
            print("Unknown ref tag: %s" % ref_tag)
            return

        content_type = ContentType.objects.get_for_model(model)
        for id in ids:
            versions = Version.objects.filter(content_type=content_type,
                                              object_id_int=id)
            print("%s - %d:" % (ref_tag, id))
            self.print_line()
            prev = {}
            n = 0
            for version in versions:
                data = json.loads(version.serialized_data)[0].get("fields")
                n += 1
                print("VERSION: %d - %s - User: %s" % (n, data.get("updated"),
                                                       version.revision.user))
                # first version: dump all fields as the baseline
                if not prev:
                    for k, v in data.items():
                        print("%s: '%s'" % (k, v))
                    self.print_line()
                    prev = data
                    continue
                # subsequent versions: only show fields that changed
                # (use .get so a field added in a later schema revision
                # does not raise KeyError)
                for k, v in data.items():
                    if prev.get(k) != v:
                        print("%s: '%s' => '%s'" % (k, prev.get(k), v))

                prev = data
                self.print_line()
|
118
peeringdb_server/management/commands/pdb_api_cache.py
Normal file
118
peeringdb_server/management/commands/pdb_api_cache.py
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
import os
|
||||||
|
import traceback
|
||||||
|
import peeringdb_server.models as pdbm
|
||||||
|
import peeringdb_server.rest as pdbr
|
||||||
|
import datetime
|
||||||
|
import time
|
||||||
|
from peeringdb_server.renderers import MetaJSONRenderer
|
||||||
|
from django.conf import settings
|
||||||
|
from optparse import make_option
|
||||||
|
from rest_framework.test import APIRequestFactory
|
||||||
|
|
||||||
|
MODELS = [
|
||||||
|
pdbm.Organization, pdbm.Network, pdbm.InternetExchange, pdbm.Facility,
|
||||||
|
pdbm.NetworkContact, pdbm.NetworkFacility, pdbm.IXLan, pdbm.IXLanPrefix,
|
||||||
|
pdbm.NetworkIXLan
|
||||||
|
]
|
||||||
|
|
||||||
|
VIEWSETS = {
|
||||||
|
"org": pdbr.OrganizationViewSet,
|
||||||
|
"net": pdbr.NetworkViewSet,
|
||||||
|
"ix": pdbr.InternetExchangeViewSet,
|
||||||
|
"fac": pdbr.FacilityViewSet,
|
||||||
|
"ixlan": pdbr.IXLanViewSet,
|
||||||
|
"ixfac": pdbr.InternetExchangeFacilityViewSet,
|
||||||
|
"ixpfx": pdbr.IXLanPrefixViewSet,
|
||||||
|
"netfac": pdbr.NetworkFacilityViewSet,
|
||||||
|
"netixlan": pdbr.NetworkIXLanViewSet,
|
||||||
|
"poc": pdbr.NetworkContactViewSet
|
||||||
|
}
|
||||||
|
|
||||||
|
settings.DEBUG = False
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    """
    Regenerate the api cache files: renders every viewset in VIEWSETS at
    depths 0-3 through the rest framework request factory and writes the
    rendered json to settings.API_CACHE_ROOT.
    """

    help = "Regen the api cache files"

    def add_arguments(self, parser):
        parser.add_argument("--only", action="store", default=False,
                            help="only run specified type")
        parser.add_argument(
            "--date", action="store", default=None, help=
            "generate cache for objects created before or at the specified date (YYYYMMDD)"
        )

    def log(self, id, msg):
        # log to both the api-cache log file and stdout
        if self.log_file:
            self.log_file.write("%s: %s" % (id, msg))
            self.log_file.flush()
        # FIX: converted py2 print statements to print() calls
        print("%s: %s" % (id, msg))

    def row_datetime(self, row, field="created"):
        """Parse a serialized row's timestamp field into a datetime."""
        return datetime.datetime.strptime(row.get(field), "%Y-%m-%dT%H:%M:%SZ")

    def handle(self, *args, **options):
        only = options.get('only', None)
        date = options.get('date', None)

        if only:
            only = only.split(",")

        if date:
            dt = datetime.datetime.strptime(date, "%Y%m%d")
        else:
            dt = datetime.datetime.now()
        dtstr = dt.strftime("%Y-%m-%dT%H:%M:%SZ")
        self.log_file = open(settings.API_CACHE_LOG, "w+")
        # FIX: corrected "Regnerating" typo in the log message
        self.log("info",
                 "Regenerating cache files to '%s'" % settings.API_CACHE_ROOT)
        self.log("info", "Caching data for timestamp: %s" % dtstr)
        rf = APIRequestFactory()
        renderer = MetaJSONRenderer()

        t = time.time()

        # requests are made as a superuser so all objects are visible
        su = pdbm.User.objects.filter(is_superuser=True).first()

        # lift the row limit so depth queries return everything
        settings.API_DEPTH_ROW_LIMIT = 0

        try:
            cache = {}

            for tag, viewset in VIEWSETS.items():
                if only and tag not in only:
                    continue

                for depth in [0, 1, 2, 3]:
                    self.log(tag, "generating depth %d" % depth)
                    if depth:
                        req = rf.get('/api/%s?depth=%d&updated__lte=%s&_ctf' %
                                     (tag, depth, dtstr))
                    else:
                        req = rf.get('/api/%s?updated__lte=%s&_ctf' % (tag,
                                                                       dtstr))
                    req.user = su
                    vs = viewset.as_view({'get': 'list'})
                    res = vs(req)
                    cache["%s-%s" % (tag, depth)] = renderer.render(
                        res.data, renderer_context={
                            "response": res
                        })
                    # free the response/view promptly, these can be large
                    del res
                    del vs

            for id, data in cache.items():
                self.log(id, "saving file")
                with open(
                        os.path.join(settings.API_CACHE_ROOT,
                                     "%s.json" % (id)), "w+") as output:
                    output.write(data)

        except Exception:
            self.log("error", traceback.format_exc())
            raise

        t2 = time.time()

        print("Finished after %.2f seconds" % (t2 - t))
|
2850
peeringdb_server/management/commands/pdb_api_test.py
Normal file
2850
peeringdb_server/management/commands/pdb_api_test.py
Normal file
File diff suppressed because it is too large
Load Diff
102
peeringdb_server/management/commands/pdb_batch_replace.py
Normal file
102
peeringdb_server/management/commands/pdb_batch_replace.py
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
|
||||||
|
import reversion
|
||||||
|
import peeringdb_server.models as pdbm
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    """
    Search-and-replace a value in a field across several entities of one
    handleref type, under a single reversion revision. Runs in pretend
    mode unless --commit is given.

    NOTE(review): this module is Python-2-only (`print` statements,
    `basestring`, `long`); porting it changes type-dispatch semantics,
    so it is documented as-is.
    """
    help = "Replace a value in a field across several entities"
    pretend = False

    # this defines which ref_tag field_name combinations may
    # be targeted by this command, this is a safety measure
    # extend as needed
    valid_targets = {"fac": ["name", "org_id"]}

    def add_arguments(self, parser):
        parser.add_argument('--commit', action='store_true',
                            help="will commit the fac merge")
        parser.add_argument('--search',
                            help="<ref_tag>.<field_name>:<search_value>")
        parser.add_argument(
            '--replace',
            help="<field_name>:<search_value>:<replacement_value>")

    def log(self, msg):
        # tag every line with the target and a [pretend] marker when
        # running without --commit
        if not self.commit:
            print "[%s] %s [pretend]" % (self.target, msg)
        else:
            print "[%s] %s" % (self.target, msg)

    @reversion.create_revision()
    def handle(self, *args, **options):

        self.commit = options.get("commit", False)
        self.search = options.get("search")
        self.replace = options.get("replace")

        if not self.search:
            raise CommandError(
                "Specify search parameters using the --search option")

        if not self.replace:
            raise CommandError(
                "Specify replacement parameters using the --replace option")

        # parse "<ref_tag>.<field_name>:<search_value>"
        try:
            search_field, search_value = self.search.split(":")
            ref_tag, search_field = search_field.split(".")
        except:
            raise CommandError(
                "Format for --search: <ref_tag>.<field_name>:<search_value>")

        # parse "<field_name>:<search_value>:<replacement_value>"
        try:
            m = re.match("^([^:]+):([^:]+):(.+)$", self.replace)
            replace_field = m.group(1)
            replace_search_value = m.group(2)
            replace_value = m.group(3)
        except:
            raise CommandError(
                "Format for --replace: <field_name>:<search_value>:<replacement_value>"
            )

        #if replace_field not in self.valid_targets.get(ref_tag,[]):
        #    raise CommandError("%s.%s is not a valid target for this script at this point, please add it to the valid_targets map" % (ref_tag, replace_field))

        self.target = "%s.%s" % (ref_tag, search_field)

        self.log("Searching for %s where %s matches '%s' ..." %
                 (ref_tag, search_field, search_value))

        q = pdbm.REFTAG_MAP[ref_tag].objects.filter(status="ok")
        c = 0

        for e in q:
            val = getattr(e, search_field)
            # object matches the search; compute the replacement value
            # according to the type of the field being replaced
            if re.search(search_value, val) != None:
                t_val = getattr(e, replace_field)
                r_val = None
                # "*" means replace unconditionally
                if replace_search_value == "*":
                    r_val = replace_value
                # strings: regex substitution
                elif isinstance(t_val, basestring):
                    r_val = re.sub(replace_search_value, replace_value, t_val)
                # numbers: replace only on exact equality
                elif type(t_val) == long:
                    if t_val == long(replace_search_value or 0):
                        r_val = replace_value
                elif type(t_val) == int:
                    if t_val == int(replace_search_value or 0):
                        r_val = replace_value
                else:
                    if t_val == replace_search_value:
                        r_val = replace_value
                # no replacement computed -> leave this object untouched
                if r_val is None:
                    continue
                self.log("(<%s> id:%s) Changing %s from '%s' to '%s'" %
                         (e, e.id, replace_field, t_val, r_val))
                c += 1
                if self.commit:
                    setattr(e, replace_field, r_val)
                    e.save()

        self.log("%d objects were changed." % c)
|
48
peeringdb_server/management/commands/pdb_deskpro_publish.py
Normal file
48
peeringdb_server/management/commands/pdb_deskpro_publish.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
from peeringdb_server import models
|
||||||
|
from peeringdb_server.deskpro import APIClient, APIError
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    """
    Publish queued DeskPro tickets: posts up to 10 unpublished
    DeskProTicket rows per run to the configured DeskPro instance.
    On API failure the ticket is still marked published, its subject is
    prefixed with [FAILED] and the error data is appended to its body.
    """

    help = "Process deskpro ticket queue"

    def add_arguments(self, parser):
        pass

    def log(self, msg):
        print(msg)

    def handle(self, *args, **options):
        client = APIClient(settings.DESKPRO_URL, settings.DESKPRO_KEY)
        self.log(u"DESKPRO: {}".format(settings.DESKPRO_URL))
        # oldest unpublished tickets first
        ticket_qs = models.DeskProTicket.objects.filter(
            published__isnull=True).order_by("created")

        if not ticket_qs.count():
            self.log("No tickets in queue")
            return

        # process at most 10 tickets per invocation
        for ticket in ticket_qs[:10]:
            self.log(u"Posting to Deskpro: #{}".format(ticket.id))

            try:
                client.create_ticket(ticket)
                ticket.published = datetime.datetime.now().replace(
                    tzinfo=models.UTC())
                ticket.save()
            except APIError as exc:
                self.log(
                    u"!!!! Could not create ticket #{} - error data has been attached to ticket body.".
                    format(ticket.id))
                # mark as published anyway so the ticket is not retried
                # forever; the failure is recorded on the ticket itself
                ticket.published = datetime.datetime.now().replace(
                    tzinfo=models.UTC())
                ticket.subject = u"[FAILED] {}".format(ticket.subject)
                ticket.body = u"{}\nAPI Delivery Error: {}".format(
                    ticket.body, exc.data)
                ticket.save()
|
49
peeringdb_server/management/commands/pdb_deskpro_requeue.py
Normal file
49
peeringdb_server/management/commands/pdb_deskpro_requeue.py
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from peeringdb_server.models import DeskProTicket
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    """
    Reset one or more deskpro tickets so they are queued for publishing
    again: strips the [FAILED] marker and the appended API error text and
    clears the `published` timestamp.

    The id argument may be a plain id, "gN" (all ids greater than N) or
    "lN" (all ids smaller than N).
    """

    help = "Reset a deskpro ticket and queue again for publish"

    def add_arguments(self, parser):
        parser.add_argument("id", nargs="?", help="ticket id")
        parser.add_argument('--commit', action='store_true',
                            help="will commit the changes")
        parser.add_argument('--only-failed', action='store_true',
                            help="only requeue failed tickets")

    def log(self, msg):
        if self.commit:
            self.stdout.write(msg)
        else:
            self.stdout.write("[pretend] {}".format(msg))

    def handle(self, *args, **options):
        _id = options.get("id")
        self.commit = options.get("commit")
        self.only_failed = options.get("only_failed")

        # FIX: `id` is declared with nargs="?" so it may be absent;
        # the original indexed _id[0] unconditionally and crashed with
        # a TypeError when no id was supplied
        if not _id:
            self.stderr.write("Please specify a ticket id")
            return

        qset = DeskProTicket.objects
        if _id[0] == "g":
            self.log("Requeuing tickets with id greater than {}".format(
                _id[1:]))
            qset = qset.filter(pk__gt=_id[1:])
        elif _id[0] == "l":
            self.log("Requeuing tickets with id smaller than {}".format(
                _id[1:]))
            qset = qset.filter(pk__lt=_id[1:])
        else:
            qset = qset.filter(pk=_id)

        for ticket in qset:
            # with --only-failed, skip tickets that did not fail delivery
            if self.only_failed and ticket.subject.find("[FAILED]") == -1:
                continue
            self.log("Requeuing ticket with id {}".format(ticket.id))
            ticket.subject = ticket.subject.replace("[FAILED]", "")
            ticket.body = re.sub(r'API Delivery Error(.+)$', '', ticket.body)
            ticket.published = None
            if self.commit:
                ticket.save()
|
159
peeringdb_server/management/commands/pdb_fac_merge.py
Normal file
159
peeringdb_server/management/commands/pdb_fac_merge.py
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from peeringdb_server.mail import mail_users_entity_merge
|
||||||
|
|
||||||
|
import reversion
|
||||||
|
import peeringdb_server.models as pdbm
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
def soft_delete(fac, cmd):
    """
    Recursively soft-delete a facility (or one of its cascade children):
    depth-first over the handleref delete_cascade relations, skipping
    objects the merge already moved, then marks the object itself as
    deleted (only persisted when cmd.commit is set).
    """
    #overriding

    # recurse into cascading child relations first, skipping anything
    # the merge has already re-parented onto the target
    for relation in fac._handleref.delete_cascade:
        children = getattr(fac, relation).exclude(status="deleted")
        for child in children:
            if child in cmd.moved:
                continue
            soft_delete(child, cmd)

    cmd.log("soft deleting %s" % fac)
    if cmd.commit:
        fac.status = "deleted"
        fac.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    """
    Merge one or more facilities into a target facility: re-parents (or
    un-deletes and updates) their network and exchange connections onto
    the target, soft-deletes the merged facilities and notifies the org
    admins of both sides. Runs in pretend mode unless --commit is given;
    all changes happen inside a single reversion revision.
    """

    help = "merge facilities"
    pretend = False

    def add_arguments(self, parser):
        parser.add_argument('--commit', action='store_true',
                            help="will commit the fac merge")
        parser.add_argument('--target',
                            help="target of the merge (facility id)")
        parser.add_argument('--ids',
                            help="comma separated list of facility ids")
        parser.add_argument(
            '--match',
            help="all facs with matching names will be merged (regex)")

    def log(self, msg):
        if not self.commit:
            self.stdout.write("%s [pretend]" % msg)
        else:
            self.stdout.write(msg)

    @reversion.create_revision()
    def handle(self, *args, **options):
        self.commit = options.get("commit", False)
        # objects that have been re-parented onto the target; consulted
        # by soft_delete() so they are not cascaded into
        self.moved = []

        self.target = options.get("target", 0)
        if not self.target:
            msg = "Target ID required (--target)"
            self.log(msg)
            raise CommandError(msg)

        self.match = options.get("match", None)
        self.ids = options.get("ids", "")

        facs = []
        moved = self.moved

        # collect source facilities either by name regex (--match) or by
        # explicit id list (--ids); the two options are mutually exclusive
        if self.match:
            if self.ids:
                msg = 'ids and match are mutually exclusive'
                self.log(msg)
                raise CommandError(msg)

            self.log("Merging all facilities matching '%s'" % self.match)
            for fac in pdbm.Facility.objects.exclude(status="deleted"):
                if re.match(self.match, fac.name, re.IGNORECASE):
                    facs.append(fac)

        elif self.ids:
            self.ids = self.ids.split(",")
            self.log("Merging facilities %s -> %s" % (", ".join(self.ids),
                                                      self.target))
            for fac in pdbm.Facility.objects.filter(id__in=self.ids):
                facs.append(fac)

        else:
            msg = 'IDs or match is required'
            self.log(msg)
            raise CommandError(msg)

        self.target = pdbm.Facility.objects.get(id=self.target)

        # make sure the merge target itself is not deleted
        if self.target.status == "deleted":
            self.target.status = "ok"
            if self.commit:
                self.target.save()

        for fac in facs:
            # never merge the target into itself
            if fac.id == self.target.id:
                continue
            self.log("Merging %s (%d) .." % (fac, fac.id))
            for netfac in pdbm.NetworkFacility.objects.filter(
                    facility=fac).exclude(status="deleted"):
                netfac_other = pdbm.NetworkFacility.objects.filter(
                    facility=self.target, network_id=netfac.network_id)
                # we check if the target fac already has a netfac to the same network (that is currently undeleted), if it does we skip it
                if netfac_other.exclude(status="deleted").exists():
                    self.log(
                        " - netfac %s : connection already exists at target, skipping."
                        % netfac)
                    continue
                # if it exists but is currently deleted, we simply undelete it
                elif netfac_other.exists():
                    netfac_other = netfac_other.first()
                    netfac_other.local_asn = netfac.local_asn
                    netfac_other.avail_sonet = netfac.avail_sonet
                    netfac_other.avail_ethernet = netfac.avail_ethernet
                    netfac_other.avail_atm = netfac.avail_atm
                    netfac_other.status = "ok"
                    self.log(" - netfac %s (undeleting and updating)" %
                             netfac_other)
                    moved.append(netfac_other)
                    if self.commit:
                        netfac_other.save()
                # if it doesnt exist, we update the facility to the target facility and save
                else:
                    self.log(" - netfac %s" % netfac)
                    netfac.facility = self.target
                    moved.append(netfac)
                    if self.commit:
                        netfac.save()

            for ixfac in pdbm.InternetExchangeFacility.objects.filter(
                    facility=fac).exclude(status="deleted"):
                ixfac_other = pdbm.InternetExchangeFacility.objects.filter(
                    facility=self.target, ix=ixfac.ix)
                # we check if the target fac already has an ixfac to the same exchange (that is currently undeleted), if it does, we skip it
                if ixfac_other.exclude(status="deleted").exists():
                    self.log(
                        " - ixfac %s : connection already exists at target, skipping."
                        % ixfac)
                    continue
                # if it exists but is currently deleted, we undelete and copy
                elif ixfac_other.exists():
                    ixfac_other = ixfac_other.first()
                    ixfac_other.status = "ok"
                    moved.append(ixfac_other)
                    self.log(
                        " - ixfac %s (undeleting and updating)" % ixfac_other)
                    if self.commit:
                        ixfac_other.save()
                # if it doesnt exist, we update the facility to the target facility and save
                else:
                    self.log(" - ixfac %s" % ixfac)
                    ixfac.facility = self.target
                    moved.append(ixfac)
                    if self.commit:
                        ixfac.save()

            # soft-delete the now-empty source facility and notify the
            # admins of both organizations
            soft_delete(fac, self)
            if self.commit:
                mail_users_entity_merge(
                    fac.org.admin_usergroup.user_set.all(),
                    self.target.org.admin_usergroup.user_set.all(), fac,
                    self.target)
|
121
peeringdb_server/management/commands/pdb_fac_merge_undo.py
Normal file
121
peeringdb_server/management/commands/pdb_fac_merge_undo.py
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
import re
|
||||||
|
import reversion
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from peeringdb_server.models import (CommandLineTool, Facility,
|
||||||
|
NetworkFacility, InternetExchangeFacility)
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
    """
    Undo a facility merge by replaying the log written by pdb_fac_merge:
    un-deletes the merged source facilities and restores each moved or
    soft-deleted netfac / ixfac to its pre-merge facility. The log comes
    either from a file (--log) or from a stored CommandLineTool run
    (--clt). Runs in pretend mode unless --commit is given.
    """

    help = "Undo a facility merge from merge log (either --log or --clt needs to provided)"

    def add_arguments(self, parser):
        parser.add_argument('--commit', action='store_true',
                            help="will commit the fac merge")
        parser.add_argument('--log', help="merge log file")
        parser.add_argument(
            '--clt', help=
            "command line tool instance - this allows you to undo if the command was run from the admin UI"
        )

    def log(self, msg):
        if not self.commit:
            self.stdout.write("[pretend] {}".format(msg))
        else:
            self.stdout.write(msg)

    @reversion.create_revision()
    def handle(self, *args, **options):
        self.commit = options.get("commit", False)
        self.log_file = options.get("log")
        self.clt_id = options.get("clt")

        # obtain the merge log lines from a file or a CommandLineTool row
        if self.log_file:
            with open(self.log_file, "r") as fh:
                log = fh.readlines()
        elif self.clt_id:
            clt = CommandLineTool.objects.get(id=self.clt_id,
                                              tool="pdb_fac_merge")
            log = clt.result.split("\n")
        else:
            self.log("[error] no suitable log provided")
            return

        # patterns matching the exact line formats pdb_fac_merge emits
        regex_facilities = "Merging facilities (.+) -> (\d+)"
        regex_netfac = " - netfac NetworkFacility-netfac(\d+)$"
        regex_ixfac = " - ixfac InternetExchangeFacility-ixfac(\d+)$"
        regex_source = "Merging (.+) \((\d+)\) .."
        regex_delete_netfac = "soft deleting NetworkFacility-netfac(\d+)"
        regex_delete_ixfac = "soft deleting InternetExchangeFacility-ixfac(\d+)"

        sources = {}
        # facility currently being undone; set by the regex_source lines
        # and used when re-parenting netfac / ixfac rows below
        source = None
        for line in log:
            if re.match(regex_facilities, line):
                match = re.match(regex_facilities, line)
                sources = dict([(fac.id, fac)
                                for fac in Facility.objects.filter(
                                    id__in=match.group(1).split(", "))])
                target = Facility.objects.get(id=match.group(2))

                # abort early if any parent org is not in a state that
                # allows un-deleting its facility
                for source in sources.values():
                    if source.org.status != "ok":
                        self.log(
                            "[error] Parent organization {} of facility {} currently has status `{}`, as such the facility cannot be undeleted, please fix the organization and run the script again".
                            format(source.org, source, source.org.status))
                        return

                for source in sources.values():
                    if source.status == "ok" and not self.commit:
                        self.log(
                            "[warning] Looks like this merge has already been undone one way or another, please double check before committing this command"
                        )
                    source.status = "ok"
                    self.log("Undeleting facility {} (#{})".format(
                        source, source.id))
                    if self.commit:
                        source.save()

                source = None

            elif re.match(regex_source, line):
                match = re.match(regex_source, line)
                source = sources[int(match.group(2))]
                self.log("======================")
                self.log("Undoing merge {} (#{})".format(source, source.id))

            elif re.match(regex_netfac, line):
                # netfac was re-parented to the target; move it back
                match = re.match(regex_netfac, line)
                netfac = NetworkFacility.objects.get(id=match.group(1))
                netfac.status = "ok"
                netfac.facility = source
                self.log("Undoing network facility merge (#{})".format(
                    netfac.id))
                if self.commit:
                    netfac.save()

            elif re.match(regex_delete_netfac, line):
                # netfac was soft-deleted during the merge; undelete it
                match = re.match(regex_delete_netfac, line)
                netfac = NetworkFacility.objects.get(id=match.group(1))
                netfac.status = "ok"
                self.log("Undoing network facility deletion (#{})".format(
                    netfac.id))
                if self.commit:
                    netfac.save()

            elif re.match(regex_ixfac, line):
                # ixfac was re-parented to the target; move it back
                match = re.match(regex_ixfac, line)
                ixfac = InternetExchangeFacility.objects.get(id=match.group(1))
                ixfac.status = "ok"
                ixfac.facility = source
                self.log("Undoing ix facility merge (#{})".format(ixfac.id))
                if self.commit:
                    ixfac.save()

            elif re.match(regex_delete_ixfac, line):
                # ixfac was soft-deleted during the merge; undelete it
                match = re.match(regex_delete_ixfac, line)
                ixfac = InternetExchangeFacility.objects.get(id=match.group(1))
                ixfac.status = "ok"
                self.log("Undoing ix facility deletion (#{})".format(ixfac.id))
                if self.commit:
                    ixfac.save()
|
105
peeringdb_server/management/commands/pdb_generate_test_data.py
Normal file
105
peeringdb_server/management/commands/pdb_generate_test_data.py
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
import googlemaps
|
||||||
|
import reversion
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
from peeringdb_server import models
|
||||||
|
from peeringdb_server.mock import Mock
|
||||||
|
|
||||||
|
from django.contrib.auth.models import Group
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
|
||||||
|
help = "Will create test data. This will wipe all data locally, so use with caution. This command is NOT to be run on production or beta environments."
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument('--commit', action='store_true',
|
||||||
|
help="will commit the changes")
|
||||||
|
|
||||||
|
parser.add_argument('--limit', type=int, default=2)
|
||||||
|
|
||||||
|
def log(self, msg):
|
||||||
|
if self.commit:
|
||||||
|
self.stdout.write(msg)
|
||||||
|
else:
|
||||||
|
self.stdout.write("[pretend] {}".format(msg))
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
self.commit = options.get("commit")
|
||||||
|
self.limit = options.get("limit")
|
||||||
|
|
||||||
|
if settings.RELEASE_ENV in ["prod", "beta"]:
|
||||||
|
self.log(
|
||||||
|
"This command is only allowed to run on dev / test instances")
|
||||||
|
return
|
||||||
|
|
||||||
|
self.mock = Mock()
|
||||||
|
self.generate()
|
||||||
|
|
||||||
|
def wipe(self):
|
||||||
|
if not self.commit:
|
||||||
|
return
|
||||||
|
|
||||||
|
# we wipe all data by simply deleting all organizations
|
||||||
|
# since everything in the end is a child of an organization
|
||||||
|
# it will wipe all peeringdb data
|
||||||
|
models.Organization.objects.all().delete()
|
||||||
|
|
||||||
|
# delete all org specific user groups
|
||||||
|
Group.objects.filter(name__startswith="org.").delete()
|
||||||
|
|
||||||
|
@reversion.create_revision()
|
||||||
|
def generate(self):
|
||||||
|
self.entities = dict([(k, []) for k in models.REFTAG_MAP.keys()])
|
||||||
|
queue = [
|
||||||
|
"org", "net", "ix", "fac", "ixlan", "ixpfx", "ixfac", "netixlan",
|
||||||
|
"netfac", "poc"
|
||||||
|
]
|
||||||
|
|
||||||
|
self.log("Wiping current data ...")
|
||||||
|
self.wipe()
|
||||||
|
self.log(
|
||||||
|
"Making {} of each - Use the --limit option to increase or decrease (5 max)".
|
||||||
|
format(self.limit))
|
||||||
|
|
||||||
|
if not self.commit:
|
||||||
|
return
|
||||||
|
|
||||||
|
for i in range(0, self.limit):
|
||||||
|
for reftag in queue:
|
||||||
|
params = {}
|
||||||
|
|
||||||
|
# create apropriate relations to previously
|
||||||
|
# create objects
|
||||||
|
if reftag in ["ixpfx", "netixlan"]:
|
||||||
|
params.update(ixlan=self.entities["ixlan"][i])
|
||||||
|
if reftag in ["poc", "netfac", "netixlan"]:
|
||||||
|
params.update(network=self.entities["net"][i])
|
||||||
|
if reftag in ["netfac", "ixfac"]:
|
||||||
|
params.update(facility=self.entities["fac"][i])
|
||||||
|
if reftag in ["ixlan", "ixfac"]:
|
||||||
|
params.update(ix=self.entities["ix"][i])
|
||||||
|
if reftag in ["ix", "net", "fac"]:
|
||||||
|
params.update(org=self.entities["org"][i])
|
||||||
|
|
||||||
|
# create object
|
||||||
|
entity = self.mock.create(reftag, **params)
|
||||||
|
self.entities[reftag].append(entity)
|
||||||
|
|
||||||
|
# for prefixes we also want to create one for the IPv6
|
||||||
|
# protocol
|
||||||
|
if reftag == "ixpfx":
|
||||||
|
params.update(protocol="IPv6")
|
||||||
|
entity = self.mock.create(reftag, **params)
|
||||||
|
self.entities[reftag].append(entity)
|
||||||
|
|
||||||
|
self.entities["net"].append(self.mock.create("net"))
|
||||||
|
self.entities["ix"].append(self.mock.create("ix"))
|
||||||
|
self.entities["fac"].append(self.mock.create("fac"))
|
||||||
|
|
||||||
|
self.entities["org"].append(self.mock.create("org"))
|
||||||
|
|
||||||
|
for reftag, entities in self.entities.items():
|
||||||
|
self.log("Created {} {}s".format(len(entities), reftag))
|
58
peeringdb_server/management/commands/pdb_geosync.py
Normal file
58
peeringdb_server/management/commands/pdb_geosync.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import googlemaps
|
||||||
|
import reversion
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
from peeringdb_server import models
|
||||||
|
|
||||||
|
API_KEY = settings.GOOGLE_GEOLOC_API_KEY
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Sync latitude and longitude on all geocoding enabled entities"
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument(
|
||||||
|
"reftag", nargs="?", help=
|
||||||
|
"can be reftag only such as 'fac' or reftag with id to only sync that specific entity such as 'fac.1'"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--limit", type=int, default=0,
|
||||||
|
help="limit how many rows are synced, useful for testing")
|
||||||
|
|
||||||
|
def log(self, msg):
|
||||||
|
print(msg)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
reftag = options.get("reftag")
|
||||||
|
limit = options.get("limit")
|
||||||
|
if reftag.find(".") > -1:
|
||||||
|
reftag, _id = reftag.split(".")
|
||||||
|
else:
|
||||||
|
_id = 0
|
||||||
|
self.gmaps = googlemaps.Client(API_KEY, timeout=5)
|
||||||
|
self.sync(reftag, _id, limit=limit)
|
||||||
|
|
||||||
|
@reversion.create_revision()
|
||||||
|
def sync(self, reftag, _id, limit=0):
|
||||||
|
model = models.REFTAG_MAP.get(reftag)
|
||||||
|
if not model:
|
||||||
|
raise ValueError("Unknown reftag: %s" % reftag)
|
||||||
|
if not hasattr(model, "geocode_status"):
|
||||||
|
raise TypeError(
|
||||||
|
"Can only geosync models containing GeocodeBaseMixin")
|
||||||
|
q = model.handleref.undeleted().filter(geocode_status=False)
|
||||||
|
if _id:
|
||||||
|
q = q.filter(id=_id)
|
||||||
|
count = q.count()
|
||||||
|
if limit > 0:
|
||||||
|
q = q[:limit]
|
||||||
|
i = 0
|
||||||
|
for entity in q:
|
||||||
|
if entity.geocode_status:
|
||||||
|
continue
|
||||||
|
i += 1
|
||||||
|
self.log("Syncing %s [%s %d/%d ID:%s]" % (entity, reftag, i, count,
|
||||||
|
entity.id))
|
||||||
|
entity.geocode(self.gmaps)
|
@@ -0,0 +1,47 @@
|
|||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from django.db import transaction
|
||||||
|
|
||||||
|
from peeringdb_server.models import (
|
||||||
|
IXLan, )
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Updates netixlan instances for all ixlans that have their ixf_ixp_member_list_url specified"
|
||||||
|
commit = False
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument('--commit', action='store_true',
|
||||||
|
help="will commit changes to the database")
|
||||||
|
parser.add_argument('--only', type=int, default=0,
|
||||||
|
help="Only process this ixlan")
|
||||||
|
|
||||||
|
def log(self, msg):
|
||||||
|
if self.commit:
|
||||||
|
print(msg)
|
||||||
|
else:
|
||||||
|
print("[Pretend] {}".format(msg))
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
self.commit = options.get("commit", False)
|
||||||
|
only_id = options.get("only", 0)
|
||||||
|
q = IXLan.objects.filter(status="ok").exclude(
|
||||||
|
ixf_ixp_member_list_url__isnull=True)
|
||||||
|
if only_id:
|
||||||
|
q = q.filter(id=only_id)
|
||||||
|
for ixlan in q:
|
||||||
|
self.log("Fetching data for {} from {}".format(
|
||||||
|
ixlan, ixlan.ixf_ixp_member_list_url))
|
||||||
|
try:
|
||||||
|
json_data = ixlan.fetch_ixf_ixp_members_list()
|
||||||
|
self.log("Updating {}".format(ixlan))
|
||||||
|
with transaction.atomic():
|
||||||
|
success, netixlans, netixlans_deleted, log = ixlan.update_from_ixf_ixp_member_list(
|
||||||
|
json_data, save=self.commit)
|
||||||
|
for line in log:
|
||||||
|
self.log(line)
|
||||||
|
self.log("Done: {} updated: {} deleted: {}".format(
|
||||||
|
success, len(netixlans), len(netixlans_deleted)))
|
||||||
|
except Exception as inst:
|
||||||
|
self.log("ERROR: {}".format(inst))
|
||||||
|
self.log(traceback.format_exc())
|
52
peeringdb_server/management/commands/pdb_ixp_merge.py
Normal file
52
peeringdb_server/management/commands/pdb_ixp_merge.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from peeringdb_server.mail import mail_users_entity_merge
|
||||||
|
|
||||||
|
import reversion
|
||||||
|
import peeringdb_server.models as pdbm
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
args = "<from_ixp_id> <to_ixp_id>"
|
||||||
|
help = "merge one ixp into another ixp"
|
||||||
|
commit = False
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument('--target', help="merge into this ixp")
|
||||||
|
parser.add_argument(
|
||||||
|
'--ids', help=
|
||||||
|
"merge these ixps (note: target ixp specified with the --target option)"
|
||||||
|
)
|
||||||
|
parser.add_argument('--commit', action='store_true',
|
||||||
|
help="commit changes")
|
||||||
|
|
||||||
|
def log(self, msg):
|
||||||
|
if not self.commit:
|
||||||
|
print "%s [pretend]" % msg
|
||||||
|
else:
|
||||||
|
print msg
|
||||||
|
|
||||||
|
@reversion.create_revision()
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
|
||||||
|
args = list(args)
|
||||||
|
|
||||||
|
self.commit = options.get("commit", False)
|
||||||
|
|
||||||
|
ixp_from = pdbm.InternetExchange.objects.get(id=options.get("ids"))
|
||||||
|
ixp_to = pdbm.InternetExchange.objects.get(id=options.get("target"))
|
||||||
|
|
||||||
|
self.log("Merging %s into %s" % (ixp_from.name, ixp_to.name))
|
||||||
|
|
||||||
|
ixlans_from = pdbm.IXLan.objects.filter(ix=ixp_from).exclude(
|
||||||
|
status="deleted")
|
||||||
|
for ixlan in ixlans_from:
|
||||||
|
ixlan.ix = ixp_to
|
||||||
|
self.log("Moving IXLAN %s to %s" % (ixlan.id, ixp_to.name))
|
||||||
|
if self.commit:
|
||||||
|
ixlan.save()
|
||||||
|
self.log("Soft Deleting %s" % ixp_from.name)
|
||||||
|
if self.commit:
|
||||||
|
ixp_from.delete()
|
||||||
|
mail_users_entity_merge(
|
||||||
|
ixp_from.org.admin_usergroup.user_set.all(),
|
||||||
|
ixp_to.org.admin_usergroup.user_set.all(), ixp_from, ixp_to)
|
120
peeringdb_server/management/commands/pdb_renumber_lans.py
Normal file
120
peeringdb_server/management/commands/pdb_renumber_lans.py
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from peeringdb_server.models import IXLanPrefix, NetworkIXLan
|
||||||
|
import reversion
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Renumber addresses, by providing the first three octects of a current ip4 address and the first three octets to change to."
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument(
|
||||||
|
'--commit', action='store_true',
|
||||||
|
help="commit changes, otherwise run in pretend mode")
|
||||||
|
parser.add_argument(
|
||||||
|
'--ix', default=0, help=
|
||||||
|
"exchange id, if set only renumber matches in ixlans owned by the specified exchange"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--ixlan', default=0,
|
||||||
|
help="ixlan id, if set only renumber matches in this specific ixlan"
|
||||||
|
)
|
||||||
|
parser.add_argument("old", nargs="+", type=str)
|
||||||
|
parser.add_argument("new", nargs="+", type=str)
|
||||||
|
|
||||||
|
def log(self, id, msg):
|
||||||
|
if not self.commit:
|
||||||
|
self.stdout.write("[pretend] %s: %s" % (id, msg))
|
||||||
|
else:
|
||||||
|
self.stdout.write("%s: %s" % (id, msg))
|
||||||
|
|
||||||
|
@reversion.create_revision()
|
||||||
|
def renumber_lans(self, old, new):
|
||||||
|
"""
|
||||||
|
Renumber ixlan and netixlan ip4 addresses, changing the first
|
||||||
|
three octets but keep the final octact in tact
|
||||||
|
|
||||||
|
193.232.245.* -> 195.208.209.*
|
||||||
|
"""
|
||||||
|
|
||||||
|
if len(old.split(".")) != 3:
|
||||||
|
raise ValueError(
|
||||||
|
"'Old Prefix' needs to be the first three octets of a IPv4 ip address - such as 195.232.245"
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(new.split(".")) != 3:
|
||||||
|
raise ValueError(
|
||||||
|
"'New Prefix' needs to be the first three octets of a IPv4 ip address - such as 195.232.245"
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.ixlan:
|
||||||
|
self.log("init", "Renumbering only in ixlan #ID:{}".format(
|
||||||
|
self.ixlan))
|
||||||
|
prefixes = IXLanPrefix.objects.filter(
|
||||||
|
protocol="IPv4", prefix__startswith="%s." % old,
|
||||||
|
ixlan_id=self.ixlan)
|
||||||
|
netixlans = NetworkIXLan.objects.filter(
|
||||||
|
ipaddr4__startswith="%s." % old, ixlan_id=self.ixlan)
|
||||||
|
elif self.ix:
|
||||||
|
self.log("init", "Renumbering only in exchange #ID:{}".format(
|
||||||
|
self.ix))
|
||||||
|
prefixes = IXLanPrefix.objects.filter(
|
||||||
|
protocol="IPv4", prefix__startswith="%s." % old,
|
||||||
|
ixlan__ix_id=self.ix)
|
||||||
|
netixlans = NetworkIXLan.objects.filter(
|
||||||
|
ipaddr4__startswith="%s." % old, ixlan__ix_id=self.ix)
|
||||||
|
else:
|
||||||
|
prefixes = IXLanPrefix.objects.filter(
|
||||||
|
protocol="IPv4", prefix__startswith="%s." % old)
|
||||||
|
netixlans = NetworkIXLan.objects.filter(
|
||||||
|
ipaddr4__startswith="%s." % old)
|
||||||
|
|
||||||
|
for prefix in prefixes:
|
||||||
|
new_prefix = unicode(".".join(
|
||||||
|
new.split(".")[:3] + [str(prefix.prefix).split(".")[-1]]))
|
||||||
|
self.log(IXLanPrefix._handleref.tag, "%s <IXLAN:%d> %s -> %s" %
|
||||||
|
(prefix.ixlan.ix, prefix.ixlan_id, prefix.prefix,
|
||||||
|
new_prefix))
|
||||||
|
|
||||||
|
if self.commit:
|
||||||
|
prefix.prefix = new_prefix
|
||||||
|
prefix.save()
|
||||||
|
|
||||||
|
for netixlan in netixlans:
|
||||||
|
new_addr = unicode(".".join(
|
||||||
|
new.split(".")[:3] + [str(netixlan.ipaddr4).split(".")[-1]]))
|
||||||
|
|
||||||
|
other = NetworkIXLan.objects.filter(ipaddr4=new_addr, status="ok")
|
||||||
|
|
||||||
|
if other.exists():
|
||||||
|
other = other.first()
|
||||||
|
self.log(
|
||||||
|
NetworkIXLan._handleref.tag,
|
||||||
|
"[error] {} (IXLAN:{}) {} -> {}: Address {} already exists in IXLAN:{} under IX:{}".
|
||||||
|
format(netixlan.ixlan.ix, netixlan.ixlan_id,
|
||||||
|
netixlan.ipaddr4, new_addr, new_addr,
|
||||||
|
other.ixlan_id, other.ixlan.ix_id))
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
self.log(
|
||||||
|
NetworkIXLan._handleref.tag, "%s (IXLAN:%d) %s -> %s" %
|
||||||
|
(netixlan.ixlan.ix, netixlan.ixlan_id, netixlan.ipaddr4,
|
||||||
|
new_addr))
|
||||||
|
|
||||||
|
if self.commit:
|
||||||
|
netixlan.ipaddr4 = new_addr
|
||||||
|
netixlan.save()
|
||||||
|
|
||||||
|
def p_usage(self):
|
||||||
|
print "USAGE: <old> <new> [options]"
|
||||||
|
print "EXAMPLE: pdb_renumber_lans 193.232.245 195.208.209"
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
self.commit = options.get("commit", False)
|
||||||
|
self.ixlan = int(options.get("ixlan", 0))
|
||||||
|
self.ix = int(options.get("ix", 0))
|
||||||
|
old = options.get("old")[0]
|
||||||
|
new = options.get("new")[0]
|
||||||
|
|
||||||
|
if not old or not new:
|
||||||
|
return self.p_usage()
|
||||||
|
self.renumber_lans(old, new)
|
@@ -0,0 +1,72 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
import json
|
||||||
|
import peeringdb_server.models as pdbm
|
||||||
|
from django.contrib.contenttypes.models import ContentType
|
||||||
|
from reversion.models import Version
|
||||||
|
|
||||||
|
MODELS = [
|
||||||
|
pdbm.Organization, pdbm.Network, pdbm.InternetExchange,
|
||||||
|
pdbm.InternetExchangeFacility, pdbm.Facility, pdbm.NetworkContact,
|
||||||
|
pdbm.NetworkFacility, pdbm.IXLan, pdbm.IXLanPrefix, pdbm.NetworkIXLan
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
args = "<reftag> <id, id, ...>"
|
||||||
|
help = "Inspect an object's reversion history"
|
||||||
|
|
||||||
|
def log(self, id, msg):
|
||||||
|
print "%s: %s" % (id, msg)
|
||||||
|
|
||||||
|
def print_line(self):
|
||||||
|
print "".join(["-" for i in range(0, 80)])
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument("reftag", nargs="?", type=str)
|
||||||
|
parser.add_argument("id", nargs="+", type=int)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
|
||||||
|
versions = Version.objects.all()
|
||||||
|
ref_tag = options.get("reftag")
|
||||||
|
|
||||||
|
ids = [int(i) for i in options.get("id")]
|
||||||
|
|
||||||
|
print(ref_tag, ids)
|
||||||
|
|
||||||
|
model = None
|
||||||
|
for m in MODELS:
|
||||||
|
if m.handleref.tag == ref_tag:
|
||||||
|
model = m
|
||||||
|
break
|
||||||
|
|
||||||
|
if not model:
|
||||||
|
print "Unknown ref tag: %s" % ref_tag
|
||||||
|
return
|
||||||
|
|
||||||
|
content_type = ContentType.objects.get_for_model(model)
|
||||||
|
for id in ids:
|
||||||
|
versions = Version.objects.filter(
|
||||||
|
content_type=content_type,
|
||||||
|
object_id=id).order_by('revision_id')
|
||||||
|
print "%s - %d:" % (ref_tag, id)
|
||||||
|
self.print_line()
|
||||||
|
prev = {}
|
||||||
|
n = 0
|
||||||
|
for version in versions:
|
||||||
|
data = json.loads(version.serialized_data)[0].get("fields")
|
||||||
|
n += 1
|
||||||
|
print "VERSION: %d (%d) - %s - User: %s" % (
|
||||||
|
n, version.id, data.get("updated"), version.revision.user)
|
||||||
|
if not prev:
|
||||||
|
for k, v in data.items():
|
||||||
|
print "%s: '%s'" % (k, v)
|
||||||
|
self.print_line()
|
||||||
|
prev = data
|
||||||
|
continue
|
||||||
|
for k, v in data.items():
|
||||||
|
if prev.get(k) != v:
|
||||||
|
print "%s: '%s' => '%s'" % (k, prev.get(k), v)
|
||||||
|
|
||||||
|
prev = data
|
||||||
|
self.print_line()
|
@@ -0,0 +1,19 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from peeringdb_server.models import Sponsorship
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Looks for expired sponsorships and sends a notification to sponsorship admin for recently expired sponsorships"
|
||||||
|
|
||||||
|
def log(self, msg):
|
||||||
|
print(msg)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
|
||||||
|
now = datetime.datetime.now()
|
||||||
|
for sponsorship in Sponsorship.objects.filter(end_date__lt=now):
|
||||||
|
if sponsorship.notify_date is None or sponsorship.notify_date < sponsorship.end_date:
|
||||||
|
b = sponsorship.notify_expiration()
|
||||||
|
#if b:
|
||||||
|
# self.log("Sent expiration notices for %s, expired on %s" % (sponsorship.org.name, sponsorship.end_date))
|
60
peeringdb_server/management/commands/pdb_stats.py
Normal file
60
peeringdb_server/management/commands/pdb_stats.py
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
from django.contrib.auth import get_user_model
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.contrib.contenttypes.models import ContentType
|
||||||
|
from reversion.models import Version, Revision
|
||||||
|
|
||||||
|
import reversion
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from peeringdb_server.models import REFTAG_MAP, UTC
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
"""
|
||||||
|
Posts stat breakdown for any given date, if not date is supplied
|
||||||
|
today will be used
|
||||||
|
"""
|
||||||
|
|
||||||
|
tags = ["fac", "ix", "net", "org"]
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument("--date", action="store", default=None,
|
||||||
|
help="generate stats for this date")
|
||||||
|
|
||||||
|
def status_at_date(self, obj, dt):
|
||||||
|
versions = Version.objects.get_for_object(obj)
|
||||||
|
version = versions.filter(revision__date_created__lte=dt).order_by(
|
||||||
|
"-revision__date_created").first()
|
||||||
|
if version:
|
||||||
|
return version.field_dict["status"]
|
||||||
|
else:
|
||||||
|
return obj.status
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
date = options.get('date', None)
|
||||||
|
if date:
|
||||||
|
dt = datetime.datetime.strptime(date, "%Y%m%d")
|
||||||
|
else:
|
||||||
|
dt = datetime.datetime.now()
|
||||||
|
|
||||||
|
dt = dt.replace(hour=23, minute=23, second=59, tzinfo=UTC())
|
||||||
|
|
||||||
|
print("{}".format(dt.replace(tzinfo=None).strftime("%Y-%m-%d")))
|
||||||
|
print("-------------")
|
||||||
|
|
||||||
|
stats = {"users": 0}
|
||||||
|
|
||||||
|
for tag in self.tags:
|
||||||
|
model = REFTAG_MAP[tag]
|
||||||
|
stats[tag] = 0
|
||||||
|
for obj in model.objects.filter(created__lte=dt):
|
||||||
|
if self.status_at_date(obj, dt) == "ok":
|
||||||
|
stats[tag] += 1
|
||||||
|
|
||||||
|
print "{}: {}".format(tag, stats[tag])
|
||||||
|
|
||||||
|
for user in get_user_model().objects.filter(created__lte=dt):
|
||||||
|
if user.is_verified:
|
||||||
|
stats["users"] += 1
|
||||||
|
|
||||||
|
print "users: {}".format(stats["users"])
|
108
peeringdb_server/management/commands/pdb_status.py
Normal file
108
peeringdb_server/management/commands/pdb_status.py
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
import peeringdb_server.models as pdbm
|
||||||
|
|
||||||
|
MODELS = [
|
||||||
|
pdbm.Organization, pdbm.Network, pdbm.InternetExchange,
|
||||||
|
pdbm.InternetExchangeFacility, pdbm.Facility, pdbm.NetworkContact,
|
||||||
|
pdbm.NetworkFacility, pdbm.IXLan, pdbm.IXLanPrefix, pdbm.NetworkIXLan
|
||||||
|
]
|
||||||
|
|
||||||
|
STATUS_TYPES = ["ok", "pending", "deleted"]
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Check data status/health"
|
||||||
|
|
||||||
|
def log(self, id, msg):
|
||||||
|
print "%s: %s" % (id, msg)
|
||||||
|
|
||||||
|
def print_line(self):
|
||||||
|
print "".join(["-" for i in range(0, 80)])
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
|
||||||
|
# STATUS: handleref status breakdown
|
||||||
|
self.print_line()
|
||||||
|
self.log("status", "handleref status breakdown")
|
||||||
|
self.print_line()
|
||||||
|
for model in MODELS:
|
||||||
|
counts = {}
|
||||||
|
for c in STATUS_TYPES:
|
||||||
|
counts[c] = model.objects.filter(status=c).count()
|
||||||
|
counts["invalid"] = model.objects.exclude(
|
||||||
|
status__in=STATUS_TYPES).count()
|
||||||
|
|
||||||
|
self.log(model.handleref.tag, " ".join(
|
||||||
|
["%s(%d)" % (k, v) for k, v in counts.items()]))
|
||||||
|
|
||||||
|
# VERSION: print the id of the instances with the highest
|
||||||
|
# version for each model - this allows to spot possible import issues
|
||||||
|
self.print_line()
|
||||||
|
self.log("version",
|
||||||
|
"5 highest version numbers for each handleref type")
|
||||||
|
self.print_line()
|
||||||
|
for model in MODELS:
|
||||||
|
inst = model.objects.order_by("-version")[:5]
|
||||||
|
self.log(model.handleref.tag, ",".join(
|
||||||
|
["%d v=%d" % (o.id, o.version) for o in inst]))
|
||||||
|
|
||||||
|
# Find orphaned elements
|
||||||
|
ixlan = pdbm.IXLan.objects.filter(
|
||||||
|
status="ok", ix__status="deleted").select_related("ix").count()
|
||||||
|
if ixlan > 0:
|
||||||
|
print "%d orphaned ixlans (ix status='deleted')" % ixlan
|
||||||
|
|
||||||
|
ixfac = pdbm.InternetExchangeFacility.objects.filter(
|
||||||
|
status="ok", ix__status="deleted").select_related("ix").count()
|
||||||
|
if ixfac > 0:
|
||||||
|
print "%d orphaned ixfacs (ix status='deleted')" % ixfac
|
||||||
|
|
||||||
|
ixfac = pdbm.InternetExchangeFacility.objects.filter(
|
||||||
|
status="ok",
|
||||||
|
facility__status="deleted").select_related("facility").count()
|
||||||
|
if ixfac > 0:
|
||||||
|
print "%d orphaned ixfacs (fac status='deleted')" % ixfac
|
||||||
|
|
||||||
|
netfac = pdbm.NetworkFacility.objects.filter(
|
||||||
|
status="ok",
|
||||||
|
network__status="deleted").select_related("network").count()
|
||||||
|
if netfac > 0:
|
||||||
|
print "%d orphaned netfacs (net status='deleted')" % netfac
|
||||||
|
|
||||||
|
netfac = pdbm.NetworkFacility.objects.filter(
|
||||||
|
status="ok",
|
||||||
|
facility__status="deleted").select_related("facility").count()
|
||||||
|
if netfac > 0:
|
||||||
|
print "%d orphaned netfacs (fac status='deleted')" % netfac
|
||||||
|
|
||||||
|
poc = pdbm.NetworkContact.objects.filter(
|
||||||
|
status="ok",
|
||||||
|
network__status="deleted").select_related("network").count()
|
||||||
|
if poc > 0:
|
||||||
|
print "%d orphaned poc (net status='deleted')" % poc
|
||||||
|
|
||||||
|
netixlan = pdbm.NetworkIXLan.objects.filter(
|
||||||
|
status="ok",
|
||||||
|
network__status="deleted").select_related("network").count()
|
||||||
|
if netixlan > 0:
|
||||||
|
print "%d orphaned netixlans (net status='deleted')" % netixlan
|
||||||
|
|
||||||
|
netixlan = pdbm.NetworkIXLan.objects.filter(
|
||||||
|
status="ok",
|
||||||
|
ixlan__status="deleted").select_related("ixlan").count()
|
||||||
|
if netixlan > 0:
|
||||||
|
print "%d orphaned netixlans (ixlan status='deleted')" % netixlan
|
||||||
|
|
||||||
|
ixpfx = pdbm.IXLanPrefix.objects.filter(
|
||||||
|
status="ok",
|
||||||
|
ixlan__status="deleted").select_related("ixlan").count()
|
||||||
|
if ixpfx:
|
||||||
|
print "%d orphaned ixpfxs (ixlan status='deleted')" % ixpfx
|
||||||
|
|
||||||
|
for model in [pdbm.Network, pdbm.InternetExchange, pdbm.Facility]:
|
||||||
|
count = model.objects.filter(
|
||||||
|
status="ok",
|
||||||
|
org__status="deleted").select_related("org").count()
|
||||||
|
if count > 0:
|
||||||
|
print "%d orphaned %ss (org status='deleted')" % (
|
||||||
|
count, model.handleref.tag)
|
91
peeringdb_server/management/commands/pdb_undelete.py
Normal file
91
peeringdb_server/management/commands/pdb_undelete.py
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from peeringdb_server.models import REFTAG_MAP
|
||||||
|
|
||||||
|
import reversion
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Undo object deletion"
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument("reftag", nargs="?",
|
||||||
|
help="object reftag (net, ix, fac etc..)")
|
||||||
|
parser.add_argument("id", nargs="?", help="object id")
|
||||||
|
parser.add_argument("version_id", nargs="?",
|
||||||
|
help="object version id where it was deleted")
|
||||||
|
parser.add_argument('--commit', action='store_true',
|
||||||
|
help="will commit the changes")
|
||||||
|
|
||||||
|
def log(self, msg):
|
||||||
|
if self.commit:
|
||||||
|
self.stdout.write(msg)
|
||||||
|
else:
|
||||||
|
self.stdout.write("[pretend] {}".format(msg))
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
self.commit = options.get("commit", False)
|
||||||
|
self.version_id = options.get("version_id")
|
||||||
|
version = reversion.models.Version.objects.get(id=self.version_id)
|
||||||
|
self.date = version.revision.date_created
|
||||||
|
self.log("UNDELETING FROM DATE: {}".format(self.date))
|
||||||
|
self.undelete(options.get("reftag"), options.get("id"))
|
||||||
|
|
||||||
|
def undelete(self, reftag, _id, parent=None, date=None):
|
||||||
|
cls = REFTAG_MAP.get(reftag)
|
||||||
|
obj = cls.objects.get(id=_id)
|
||||||
|
|
||||||
|
if date:
|
||||||
|
version = reversion.models.Version.objects.get_for_object(
|
||||||
|
obj).filter(revision__date_created__lt=date).order_by(
|
||||||
|
"revision__date_created").last()
|
||||||
|
try:
|
||||||
|
status = json.loads(
|
||||||
|
version.serialized_data)[0].get("fields")["status"]
|
||||||
|
except:
|
||||||
|
status = None
|
||||||
|
if status == "deleted":
|
||||||
|
self.log(
|
||||||
|
"{} was already deleted at snapshot, skipping ..".format(
|
||||||
|
obj))
|
||||||
|
return
|
||||||
|
|
||||||
|
can_undelete_obj = True
|
||||||
|
|
||||||
|
for field in cls._meta.get_fields():
|
||||||
|
if field.is_relation:
|
||||||
|
if field.many_to_one:
|
||||||
|
# relation parent
|
||||||
|
try:
|
||||||
|
relation = getattr(obj, field.name)
|
||||||
|
except:
|
||||||
|
continue
|
||||||
|
if relation.status == "deleted" and relation != parent:
|
||||||
|
can_undelete_obj = False
|
||||||
|
self.log(
|
||||||
|
"Cannot undelete {}, dependent relation marked as deleted: {}".
|
||||||
|
format(obj, relation))
|
||||||
|
|
||||||
|
if not can_undelete_obj:
|
||||||
|
return
|
||||||
|
|
||||||
|
if obj.status == "deleted":
|
||||||
|
obj.status = "ok"
|
||||||
|
self.log("Undeleting {}".format(obj))
|
||||||
|
if self.commit:
|
||||||
|
obj.save()
|
||||||
|
|
||||||
|
for field in cls._meta.get_fields():
|
||||||
|
if field.is_relation:
|
||||||
|
if not field.many_to_one:
|
||||||
|
# relation child
|
||||||
|
try:
|
||||||
|
relation = getattr(obj, field.name)
|
||||||
|
except:
|
||||||
|
continue
|
||||||
|
if not hasattr(field.related_model, "ref_tag"):
|
||||||
|
continue
|
||||||
|
for child in relation.filter(updated__gte=self.date):
|
||||||
|
self.undelete(child.ref_tag, child.id, obj,
|
||||||
|
date=self.date)
|
63
peeringdb_server/management/commands/pdb_whois.py
Normal file
63
peeringdb_server/management/commands/pdb_whois.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from ._db_command import CommandError, DBCommand
|
||||||
|
|
||||||
|
from peeringdb.whois import WhoisFormat
|
||||||
|
from peeringdb_server import models
|
||||||
|
from peeringdb_server import serializers
|
||||||
|
from django_handleref import util
|
||||||
|
|
||||||
|
|
||||||
|
class Command(DBCommand):
|
||||||
|
args = '<customer id>'
|
||||||
|
help = 'command line whois'
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument("ref", nargs="?", type=str)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
log = logging.getLogger('pdb.script.whois')
|
||||||
|
|
||||||
|
# FIXME - ignore multiple args for now
|
||||||
|
args = options.get("ref")
|
||||||
|
|
||||||
|
try:
|
||||||
|
(ref_tag, pk) = util.split_ref(args)
|
||||||
|
except ValueError:
|
||||||
|
log.error("Unknown query type '%s'" % (args))
|
||||||
|
return
|
||||||
|
# TODO
|
||||||
|
raise CommandError('unk query')
|
||||||
|
|
||||||
|
model = None
|
||||||
|
|
||||||
|
# TODO needs the client whois typ resolver in a better place for reuse
|
||||||
|
# probably easiest to just map reftag to pk name
|
||||||
|
if ref_tag in models.REFTAG_MAP:
|
||||||
|
model = models.REFTAG_MAP[ref_tag]
|
||||||
|
Serializer = serializers.REFTAG_MAP[ref_tag]
|
||||||
|
obj = Serializer.prefetch_related(model.objects, None,
|
||||||
|
depth=2).get(pk=pk)
|
||||||
|
|
||||||
|
elif ref_tag == 'as':
|
||||||
|
model = models.REFTAG_MAP['net']
|
||||||
|
Serializer = serializers.REFTAG_MAP['net']
|
||||||
|
obj = Serializer.prefetch_related(model.objects, None,
|
||||||
|
depth=2).get(asn=pk)
|
||||||
|
|
||||||
|
|
||||||
|
# data = cls(obj).data
|
||||||
|
|
||||||
|
# TODO doesn't work on client
|
||||||
|
# elif ref_tag == 'ixnets':
|
||||||
|
|
||||||
|
if not model:
|
||||||
|
msg = "Unknown ref tag: {}".format(ref_tag)
|
||||||
|
log.error("Unknown ref tag: %s" % ref_tag)
|
||||||
|
raise ValueError(msg)
|
||||||
|
|
||||||
|
data = Serializer(obj).data
|
||||||
|
fmt = WhoisFormat()
|
||||||
|
fmt. print(obj._handleref.tag, data)
|
845
peeringdb_server/migrations/0001_initial.py
Normal file
845
peeringdb_server/migrations/0001_initial.py
Normal file
@@ -0,0 +1,845 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Generated by Django 1.11.4 on 2017-08-03 22:13
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
import django.contrib.auth.models
|
||||||
|
import django.core.validators
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
import django.db.models.manager
|
||||||
|
import django.utils.timezone
|
||||||
|
import django_countries.fields
|
||||||
|
import django_handleref.models
|
||||||
|
import django_inet.models
|
||||||
|
import django_peeringdb.models.abstract
|
||||||
|
import peeringdb_server.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
initial = True
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('contenttypes', '0002_remove_content_type_name'),
|
||||||
|
('auth', '0008_alter_user_username_max_length'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='User',
|
||||||
|
fields=[
|
||||||
|
('id',
|
||||||
|
models.AutoField(auto_created=True, primary_key=True,
|
||||||
|
serialize=False, verbose_name='ID')),
|
||||||
|
('password',
|
||||||
|
models.CharField(max_length=128, verbose_name='password')),
|
||||||
|
('last_login',
|
||||||
|
models.DateTimeField(blank=True, null=True,
|
||||||
|
verbose_name='last login')),
|
||||||
|
('is_superuser',
|
||||||
|
models.BooleanField(
|
||||||
|
default=False, help_text=
|
||||||
|
'Designates that this user has all permissions without explicitly assigning them.',
|
||||||
|
verbose_name='superuser status')),
|
||||||
|
('username',
|
||||||
|
models.CharField(
|
||||||
|
help_text='Required. Letters, digits and [@.+-/_=|] only.',
|
||||||
|
max_length=254, unique=True, validators=[
|
||||||
|
django.core.validators.RegexValidator(
|
||||||
|
b'^[\\w\\.@+-=|/]+$', 'Enter a valid username.',
|
||||||
|
b'invalid', flags=32)
|
||||||
|
], verbose_name='username')),
|
||||||
|
('email',
|
||||||
|
models.EmailField(max_length=254,
|
||||||
|
verbose_name='email address')),
|
||||||
|
('first_name',
|
||||||
|
models.CharField(blank=True, max_length=254,
|
||||||
|
verbose_name='first name')),
|
||||||
|
('last_name',
|
||||||
|
models.CharField(blank=True, max_length=254,
|
||||||
|
verbose_name='last name')),
|
||||||
|
('is_staff',
|
||||||
|
models.BooleanField(
|
||||||
|
default=False, help_text=
|
||||||
|
'Designates whether the user can log into admin site.',
|
||||||
|
verbose_name='staff status')),
|
||||||
|
('is_active',
|
||||||
|
models.BooleanField(
|
||||||
|
default=True, help_text=
|
||||||
|
'Designates whether this user should be treated as active. Unselect this instead of deleting accounts.',
|
||||||
|
verbose_name='active')),
|
||||||
|
('date_joined',
|
||||||
|
models.DateTimeField(default=django.utils.timezone.now,
|
||||||
|
verbose_name='date joined')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('status',
|
||||||
|
models.CharField(default=b'ok', max_length=254,
|
||||||
|
verbose_name='status')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_user',
|
||||||
|
'verbose_name': 'user',
|
||||||
|
'verbose_name_plural': 'users',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('objects', django.contrib.auth.models.UserManager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Facility',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('address1', models.CharField(blank=True, max_length=255)),
|
||||||
|
('address2', models.CharField(blank=True, max_length=255)),
|
||||||
|
('city', models.CharField(blank=True, max_length=255)),
|
||||||
|
('state', models.CharField(blank=True, max_length=255)),
|
||||||
|
('zipcode', models.CharField(blank=True, max_length=48)),
|
||||||
|
('country',
|
||||||
|
django_countries.fields.CountryField(blank=True,
|
||||||
|
max_length=2)),
|
||||||
|
('name', models.CharField(max_length=255, unique=True)),
|
||||||
|
('website',
|
||||||
|
django_peeringdb.models.abstract.URLField(
|
||||||
|
blank=True, max_length=255)),
|
||||||
|
('clli', models.CharField(blank=True, max_length=18)),
|
||||||
|
('rencode', models.CharField(blank=True, max_length=18)),
|
||||||
|
('npanxx', models.CharField(blank=True, max_length=21)),
|
||||||
|
('notes', models.TextField(blank=True)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'abstract': False,
|
||||||
|
'db_table': 'peeringdb_facility',
|
||||||
|
'verbose_name_plural': 'Facilities',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='InternetExchange',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('name', models.CharField(max_length=64, unique=True)),
|
||||||
|
('name_long', models.CharField(blank=True, max_length=254)),
|
||||||
|
('city', models.CharField(max_length=192)),
|
||||||
|
('country',
|
||||||
|
django_countries.fields.CountryField(max_length=2)),
|
||||||
|
('notes', models.TextField(blank=True)),
|
||||||
|
('region_continent',
|
||||||
|
models.CharField(
|
||||||
|
choices=[(b'North America', b'North America'),
|
||||||
|
(b'Asia Pacific', b'Asia Pacific'), (b'Europe',
|
||||||
|
b'Europe'),
|
||||||
|
(b'South America',
|
||||||
|
b'South America'), (b'Africa', b'Africa'),
|
||||||
|
(b'Australia', b'Australia'), (b'Middle East',
|
||||||
|
b'Middle East')],
|
||||||
|
max_length=255)),
|
||||||
|
('media',
|
||||||
|
models.CharField(
|
||||||
|
choices=[(b'Ethernet', b'Ethernet'), (b'ATM', b'ATM'),
|
||||||
|
(b'Multiple', b'Multiple')], max_length=128)),
|
||||||
|
('proto_unicast', models.BooleanField(default=False)),
|
||||||
|
('proto_multicast', models.BooleanField(default=False)),
|
||||||
|
('proto_ipv6', models.BooleanField(default=False)),
|
||||||
|
('website',
|
||||||
|
django_peeringdb.models.abstract.URLField(
|
||||||
|
blank=True, max_length=255)),
|
||||||
|
('url_stats',
|
||||||
|
django_peeringdb.models.abstract.URLField(
|
||||||
|
blank=True, max_length=255)),
|
||||||
|
('tech_email', models.EmailField(blank=True, max_length=254)),
|
||||||
|
('tech_phone', models.CharField(blank=True, max_length=192)),
|
||||||
|
('policy_email', models.EmailField(blank=True,
|
||||||
|
max_length=254)),
|
||||||
|
('policy_phone', models.CharField(blank=True, max_length=192)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'abstract': False,
|
||||||
|
'db_table': 'peeringdb_ix',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='InternetExchangeFacility',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('facility',
|
||||||
|
models.ForeignKey(default=0,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='ixfac_set',
|
||||||
|
to='peeringdb_server.Facility')),
|
||||||
|
('ix',
|
||||||
|
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='ixfac_set',
|
||||||
|
to='peeringdb_server.InternetExchange')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_ix_facility',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='IXLan',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('name', models.CharField(blank=True, max_length=255)),
|
||||||
|
('descr', models.TextField(blank=True)),
|
||||||
|
('mtu', models.PositiveIntegerField(blank=True, null=True)),
|
||||||
|
('vlan', models.PositiveIntegerField(blank=True, null=True)),
|
||||||
|
('dot1q_support', models.BooleanField(default=False)),
|
||||||
|
('rs_asn',
|
||||||
|
django_inet.models.ASNField(blank=True, default=0,
|
||||||
|
null=True)),
|
||||||
|
('arp_sponge',
|
||||||
|
django_inet.models.MacAddressField(blank=True, max_length=17,
|
||||||
|
null=True, unique=True)),
|
||||||
|
('ix',
|
||||||
|
models.ForeignKey(default=0,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='ixlan_set',
|
||||||
|
to='peeringdb_server.InternetExchange')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_ixlan',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='IXLanPrefix',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('notes', models.CharField(blank=True, max_length=255)),
|
||||||
|
('protocol',
|
||||||
|
models.CharField(choices=[(b'IPv4', b'IPv4'), (b'IPv6',
|
||||||
|
b'IPv6')],
|
||||||
|
max_length=64)),
|
||||||
|
('prefix',
|
||||||
|
django_inet.models.IPPrefixField(max_length=43, unique=True)),
|
||||||
|
('ixlan',
|
||||||
|
models.ForeignKey(
|
||||||
|
default=0, on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='ixpfx_set', to='peeringdb_server.IXLan')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'abstract': False,
|
||||||
|
'db_table': 'peeringdb_ixlan_prefix',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Network',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('asn', django_inet.models.ASNField(unique=True)),
|
||||||
|
('name', models.CharField(max_length=255, unique=True)),
|
||||||
|
('aka', models.CharField(blank=True, max_length=255)),
|
||||||
|
('irr_as_set', models.CharField(blank=True, max_length=255)),
|
||||||
|
('website',
|
||||||
|
django_peeringdb.models.abstract.URLField(
|
||||||
|
blank=True, max_length=255)),
|
||||||
|
('looking_glass',
|
||||||
|
django_peeringdb.models.abstract.URLField(
|
||||||
|
blank=True, max_length=255)),
|
||||||
|
('route_server',
|
||||||
|
django_peeringdb.models.abstract.URLField(
|
||||||
|
blank=True, max_length=255)),
|
||||||
|
('notes', models.TextField(blank=True)),
|
||||||
|
('notes_private', models.TextField(blank=True)),
|
||||||
|
('info_traffic',
|
||||||
|
models.CharField(blank=True, choices=[
|
||||||
|
(b'', b'Not Disclosed'), (b'0-20 Mbps', b'0-20 Mbps'),
|
||||||
|
(b'20-100Mbps', b'20-100Mbps'), (b'100-1000Mbps',
|
||||||
|
b'100-1000Mbps'),
|
||||||
|
(b'1-5Gbps', b'1-5Gbps'), (b'5-10Gbps',
|
||||||
|
b'5-10Gbps'), (b'10-20Gbps',
|
||||||
|
b'10-20Gbps'),
|
||||||
|
(b'20-50 Gbps',
|
||||||
|
b'20-50 Gbps'), (b'50-100 Gbps',
|
||||||
|
b'50-100 Gbps'), (b'100+ Gbps',
|
||||||
|
b'100+ Gbps'),
|
||||||
|
(b'100-200 Gbps',
|
||||||
|
b'100-200 Gbps'), (b'200-300 Gbps',
|
||||||
|
b'200-300 Gbps'), (b'300-500 Gbps',
|
||||||
|
b'300-500 Gbps'),
|
||||||
|
(b'500-1000 Gbps',
|
||||||
|
b'500-1000 Gbps'), (b'1 Tbps+',
|
||||||
|
b'1 Tbps+'), (b'10 Tbps+',
|
||||||
|
b'10 Tbps+')
|
||||||
|
], max_length=39)),
|
||||||
|
('info_ratio',
|
||||||
|
models.CharField(blank=True, choices=[
|
||||||
|
(b'', b'Not Disclosed'), (b'Not Disclosed',
|
||||||
|
b'Not Disclosed'),
|
||||||
|
(b'Heavy Outbound',
|
||||||
|
b'Heavy Outbound'), (b'Mostly Outbound',
|
||||||
|
b'Mostly Outbound'), (b'Balanced',
|
||||||
|
b'Balanced'),
|
||||||
|
(b'Mostly Inbound', b'Mostly Inbound'), (b'Heavy Inbound',
|
||||||
|
b'Heavy Inbound')
|
||||||
|
], default=b'Not Disclosed', max_length=45)),
|
||||||
|
('info_scope',
|
||||||
|
models.CharField(blank=True, choices=[
|
||||||
|
(b'', b'Not Disclosed'), (b'Not Disclosed',
|
||||||
|
b'Not Disclosed'),
|
||||||
|
(b'Regional',
|
||||||
|
b'Regional'), (b'North America',
|
||||||
|
b'North America'), (b'Asia Pacific',
|
||||||
|
b'Asia Pacific'),
|
||||||
|
(b'Europe', b'Europe'), (b'South America',
|
||||||
|
b'South America'), (b'Africa',
|
||||||
|
b'Africa'),
|
||||||
|
(b'Australia',
|
||||||
|
b'Australia'), (b'Middle East',
|
||||||
|
b'Middle East'), (b'Global', b'Global')
|
||||||
|
], default=b'Not Disclosed', max_length=39)),
|
||||||
|
('info_type',
|
||||||
|
models.CharField(blank=True, choices=[
|
||||||
|
(b'', b'Not Disclosed'), (b'Not Disclosed',
|
||||||
|
b'Not Disclosed'),
|
||||||
|
(b'NSP', b'NSP'), (b'Content',
|
||||||
|
b'Content'), (b'Cable/DSL/ISP',
|
||||||
|
b'Cable/DSL/ISP'),
|
||||||
|
(b'Enterprise', b'Enterprise'), (b'Educational/Research',
|
||||||
|
b'Educational/Research'),
|
||||||
|
(b'Non-Profit', b'Non-Profit'), (b'Route Server',
|
||||||
|
b'Route Server')
|
||||||
|
], default=b'Not Disclosed', max_length=60)),
|
||||||
|
('info_prefixes4',
|
||||||
|
models.PositiveIntegerField(blank=True, null=True)),
|
||||||
|
('info_prefixes6',
|
||||||
|
models.PositiveIntegerField(blank=True, null=True)),
|
||||||
|
('info_unicast', models.BooleanField(default=False)),
|
||||||
|
('info_multicast', models.BooleanField(default=False)),
|
||||||
|
('info_ipv6', models.BooleanField(default=False)),
|
||||||
|
('policy_url',
|
||||||
|
django_peeringdb.models.abstract.URLField(
|
||||||
|
blank=True, max_length=255)),
|
||||||
|
('policy_general',
|
||||||
|
models.CharField(blank=True,
|
||||||
|
choices=[(b'Open', b'Open'), (b'Selective',
|
||||||
|
b'Selective'),
|
||||||
|
(b'Restrictive', b'Restrictive'),
|
||||||
|
(b'No', b'No')], max_length=72)),
|
||||||
|
('policy_locations',
|
||||||
|
models.CharField(
|
||||||
|
blank=True,
|
||||||
|
choices=[(b'Not Required',
|
||||||
|
b'Not Required'), (b'Preferred', b'Preferred'),
|
||||||
|
(b'Required - US',
|
||||||
|
b'Required - US'), (b'Required - EU',
|
||||||
|
b'Required - EU'),
|
||||||
|
(b'Required - International',
|
||||||
|
b'Required - International')], max_length=72)),
|
||||||
|
('policy_ratio', models.BooleanField(default=False)),
|
||||||
|
('policy_contracts',
|
||||||
|
models.CharField(blank=True,
|
||||||
|
choices=[(b'Not Required', b'Not Required'),
|
||||||
|
(b'Private Only', b'Private Only'),
|
||||||
|
(b'Required',
|
||||||
|
b'Required')], max_length=36)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'abstract': False,
|
||||||
|
'db_table': 'peeringdb_network',
|
||||||
|
'verbose_name_plural': 'Networks',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='NetworkContact',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('role',
|
||||||
|
models.CharField(
|
||||||
|
choices=[(b'Abuse', b'Abuse'), (b'Maintenance',
|
||||||
|
b'Maintenance'),
|
||||||
|
(b'Policy', b'Policy'), (b'Technical',
|
||||||
|
b'Technical'),
|
||||||
|
(b'NOC', b'NOC'), (b'Public Relations',
|
||||||
|
b'Public Relations'),
|
||||||
|
(b'Sales', b'Sales')], max_length=27)),
|
||||||
|
('visible',
|
||||||
|
models.CharField(choices=[(b'Private', b'Private'),
|
||||||
|
(b'Users', b'Users'), (b'Public',
|
||||||
|
b'Public')],
|
||||||
|
default=b'Public', max_length=64)),
|
||||||
|
('name', models.CharField(blank=True, max_length=254)),
|
||||||
|
('phone', models.CharField(blank=True, max_length=100)),
|
||||||
|
('email', models.EmailField(blank=True, max_length=254)),
|
||||||
|
('url',
|
||||||
|
django_peeringdb.models.abstract.URLField(
|
||||||
|
blank=True, max_length=255)),
|
||||||
|
('network',
|
||||||
|
models.ForeignKey(
|
||||||
|
default=0, on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='poc_set', to='peeringdb_server.Network')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_network_contact',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='NetworkFacility',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('local_asn', django_inet.models.ASNField(
|
||||||
|
blank=True, null=True)),
|
||||||
|
('avail_sonet', models.BooleanField(default=False)),
|
||||||
|
('avail_ethernet', models.BooleanField(default=False)),
|
||||||
|
('avail_atm', models.BooleanField(default=False)),
|
||||||
|
('facility',
|
||||||
|
models.ForeignKey(default=0,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='netfac_set',
|
||||||
|
to='peeringdb_server.Facility')),
|
||||||
|
('network',
|
||||||
|
models.ForeignKey(default=0,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='netfac_set',
|
||||||
|
to='peeringdb_server.Network')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_network_facility',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='NetworkIXLan',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('asn', django_inet.models.ASNField()),
|
||||||
|
('ipaddr4',
|
||||||
|
django_inet.models.IPAddressField(blank=True, max_length=39,
|
||||||
|
null=True)),
|
||||||
|
('ipaddr6',
|
||||||
|
django_inet.models.IPAddressField(blank=True, max_length=39,
|
||||||
|
null=True)),
|
||||||
|
('is_rs_peer', models.BooleanField(default=False)),
|
||||||
|
('notes', models.CharField(blank=True, max_length=255)),
|
||||||
|
('speed', models.PositiveIntegerField()),
|
||||||
|
('ixlan',
|
||||||
|
models.ForeignKey(default=0,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='netixlan_set',
|
||||||
|
to='peeringdb_server.IXLan')),
|
||||||
|
('network',
|
||||||
|
models.ForeignKey(default=0,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='netixlan_set',
|
||||||
|
to='peeringdb_server.Network')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_network_ixlan',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Organization',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('status',
|
||||||
|
models.CharField(blank=True, max_length=255,
|
||||||
|
verbose_name='Status')),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('updated',
|
||||||
|
django_handleref.models.UpdatedDateTimeField(
|
||||||
|
auto_now=True, verbose_name='Updated')),
|
||||||
|
('version', models.IntegerField(default=0)),
|
||||||
|
('address1', models.CharField(blank=True, max_length=255)),
|
||||||
|
('address2', models.CharField(blank=True, max_length=255)),
|
||||||
|
('city', models.CharField(blank=True, max_length=255)),
|
||||||
|
('state', models.CharField(blank=True, max_length=255)),
|
||||||
|
('zipcode', models.CharField(blank=True, max_length=48)),
|
||||||
|
('country',
|
||||||
|
django_countries.fields.CountryField(blank=True,
|
||||||
|
max_length=2)),
|
||||||
|
('name', models.CharField(max_length=255, unique=True)),
|
||||||
|
('website',
|
||||||
|
django_peeringdb.models.abstract.URLField(
|
||||||
|
blank=True, max_length=255)),
|
||||||
|
('notes', models.TextField(blank=True)),
|
||||||
|
('logo',
|
||||||
|
models.FileField(
|
||||||
|
blank=True, help_text=
|
||||||
|
b'Allows you to upload and set a logo image file for this organization',
|
||||||
|
null=True, upload_to=b'logos/')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'abstract': False,
|
||||||
|
'db_table': 'peeringdb_organization',
|
||||||
|
'verbose_name_plural': 'Organizations',
|
||||||
|
},
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='OrganizationMerge',
|
||||||
|
fields=[
|
||||||
|
('id',
|
||||||
|
models.AutoField(auto_created=True, primary_key=True,
|
||||||
|
serialize=False, verbose_name='ID')),
|
||||||
|
('created',
|
||||||
|
models.DateTimeField(auto_now_add=True,
|
||||||
|
verbose_name='Merged on')),
|
||||||
|
('from_org',
|
||||||
|
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='merged_to',
|
||||||
|
to='peeringdb_server.Organization')),
|
||||||
|
('to_org',
|
||||||
|
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='merged_from',
|
||||||
|
to='peeringdb_server.Organization')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_organization_merge',
|
||||||
|
'verbose_name': 'Organization Merge',
|
||||||
|
'verbose_name_plural': 'Organization Merges',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='OrganizationMergeEntity',
|
||||||
|
fields=[
|
||||||
|
('id',
|
||||||
|
models.AutoField(auto_created=True, primary_key=True,
|
||||||
|
serialize=False, verbose_name='ID')),
|
||||||
|
('object_id', models.PositiveIntegerField()),
|
||||||
|
('note', models.CharField(blank=True, max_length=32,
|
||||||
|
null=True)),
|
||||||
|
('content_type',
|
||||||
|
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to='contenttypes.ContentType')),
|
||||||
|
('merge',
|
||||||
|
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='entities',
|
||||||
|
to='peeringdb_server.OrganizationMerge')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_organization_merge_entity',
|
||||||
|
'verbose_name': 'Organization Merge: Entity',
|
||||||
|
'verbose_name_plural': 'Organization Merge: Entities',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Sponsorship',
|
||||||
|
fields=[
|
||||||
|
('id',
|
||||||
|
models.AutoField(auto_created=True, primary_key=True,
|
||||||
|
serialize=False, verbose_name='ID')),
|
||||||
|
('start_date',
|
||||||
|
models.DateTimeField(
|
||||||
|
default=peeringdb_server.models.default_time_s,
|
||||||
|
verbose_name='Sponsorship starts on')),
|
||||||
|
('end_date',
|
||||||
|
models.DateTimeField(
|
||||||
|
default=peeringdb_server.models.default_time_e,
|
||||||
|
verbose_name='Sponsorship ends on')),
|
||||||
|
('notify_date',
|
||||||
|
models.DateTimeField(
|
||||||
|
blank=True, null=True,
|
||||||
|
verbose_name='Expiration notification sent on')),
|
||||||
|
('level',
|
||||||
|
models.PositiveIntegerField(
|
||||||
|
choices=[(1, 'Silver'), (2, 'Gold'), (3, 'Platinum'),
|
||||||
|
(4, 'Diamond')], default=1)),
|
||||||
|
('url',
|
||||||
|
models.URLField(
|
||||||
|
blank=True,
|
||||||
|
help_text=
|
||||||
|
'If specified clicking the sponsorship will take the user to this location',
|
||||||
|
null=True, verbose_name='URL')),
|
||||||
|
('logo',
|
||||||
|
models.FileField(
|
||||||
|
blank=True, help_text=
|
||||||
|
b'Allows you to upload and set a logo image file for this sponsorship',
|
||||||
|
null=True, upload_to=b'logos/')),
|
||||||
|
('org',
|
||||||
|
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='sponsorships',
|
||||||
|
to='peeringdb_server.Organization')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_sponsorship',
|
||||||
|
'verbose_name': 'Sponsorship',
|
||||||
|
'verbose_name_plural': 'Sponsorships',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='UserOrgAffiliationRequest',
|
||||||
|
fields=[
|
||||||
|
('id',
|
||||||
|
models.AutoField(auto_created=True, primary_key=True,
|
||||||
|
serialize=False, verbose_name='ID')),
|
||||||
|
('org_name',
|
||||||
|
models.CharField(
|
||||||
|
blank=True,
|
||||||
|
help_text=b'The organization name entered by the user',
|
||||||
|
max_length=255, null=True)),
|
||||||
|
('asn',
|
||||||
|
django_inet.models.ASNField(
|
||||||
|
blank=True, help_text=b'The ASN entered by the user',
|
||||||
|
null=True)),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('status',
|
||||||
|
models.CharField(
|
||||||
|
choices=[(b'pending', b'Pending'), (b'approved',
|
||||||
|
b'Approved'),
|
||||||
|
(b'denied', b'Denied')],
|
||||||
|
help_text=b'Status of this request', max_length=254)),
|
||||||
|
('org',
|
||||||
|
models.ForeignKey(
|
||||||
|
blank=True, help_text=
|
||||||
|
b'This organization in our database that was derived from the provided ASN or organization name. If this is empty it means no matching organization was found.',
|
||||||
|
null=True, on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='affiliation_requests',
|
||||||
|
to='peeringdb_server.Organization')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_user_org_affil_request',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='VerificationQueueItem',
|
||||||
|
fields=[
|
||||||
|
('id',
|
||||||
|
models.AutoField(auto_created=True, primary_key=True,
|
||||||
|
serialize=False, verbose_name='ID')),
|
||||||
|
('object_id', models.PositiveIntegerField()),
|
||||||
|
('created',
|
||||||
|
django_handleref.models.CreatedDateTimeField(
|
||||||
|
auto_now_add=True, verbose_name='Created')),
|
||||||
|
('notified', models.BooleanField(default=False)),
|
||||||
|
('content_type',
|
||||||
|
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to='contenttypes.ContentType')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_verification_queue',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='UserPasswordReset',
|
||||||
|
fields=[
|
||||||
|
('user',
|
||||||
|
models.OneToOneField(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
primary_key=True, related_name='password_reset',
|
||||||
|
serialize=False, to=settings.AUTH_USER_MODEL)),
|
||||||
|
('token', models.CharField(max_length=255)),
|
||||||
|
('created', models.DateTimeField(auto_now_add=True)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_user_password_reset',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='verificationqueueitem',
|
||||||
|
name='user',
|
||||||
|
field=models.ForeignKey(
|
||||||
|
blank=True, help_text=
|
||||||
|
b'The item that this queue is attached to was created by this user',
|
||||||
|
null=True, on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='vqitems', to=settings.AUTH_USER_MODEL),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='userorgaffiliationrequest',
|
||||||
|
name='user',
|
||||||
|
field=models.ForeignKey(
|
||||||
|
help_text=b'The user that made the request',
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='affiliation_requests',
|
||||||
|
to=settings.AUTH_USER_MODEL),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='network',
|
||||||
|
name='org',
|
||||||
|
field=models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='net_set', to='peeringdb_server.Organization'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='internetexchange',
|
||||||
|
name='org',
|
||||||
|
field=models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='ix_set', to='peeringdb_server.Organization'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='facility',
|
||||||
|
name='org',
|
||||||
|
field=models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='fac_set', to='peeringdb_server.Organization'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='user',
|
||||||
|
name='groups',
|
||||||
|
field=models.ManyToManyField(
|
||||||
|
blank=True, help_text=
|
||||||
|
'The groups this user belongs to. A user will get all permissions granted to each of their groups.',
|
||||||
|
related_name='user_set', related_query_name='user',
|
||||||
|
to='auth.Group', verbose_name='groups'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='user',
|
||||||
|
name='user_permissions',
|
||||||
|
field=models.ManyToManyField(
|
||||||
|
blank=True, help_text='Specific permissions for this user.',
|
||||||
|
related_name='user_set', related_query_name='user',
|
||||||
|
to='auth.Permission', verbose_name='user permissions'),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='DuplicateIPNetworkIXLan',
|
||||||
|
fields=[],
|
||||||
|
options={
|
||||||
|
'verbose_name': 'Duplicate IP',
|
||||||
|
'proxy': True,
|
||||||
|
'verbose_name_plural': 'Duplicate IPs',
|
||||||
|
'indexes': [],
|
||||||
|
},
|
||||||
|
bases=('peeringdb_server.networkixlan', ),
|
||||||
|
managers=[
|
||||||
|
('handleref', django.db.models.manager.Manager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='UserPermission',
|
||||||
|
fields=[],
|
||||||
|
options={
|
||||||
|
'verbose_name': 'User Permission',
|
||||||
|
'proxy': True,
|
||||||
|
'verbose_name_plural': 'User Permissions',
|
||||||
|
'indexes': [],
|
||||||
|
},
|
||||||
|
bases=('peeringdb_server.user', ),
|
||||||
|
managers=[
|
||||||
|
('objects', django.contrib.auth.models.UserManager()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='networkfacility',
|
||||||
|
unique_together=set([('network', 'facility', 'local_asn')]),
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='internetexchangefacility',
|
||||||
|
unique_together=set([('ix', 'facility')]),
|
||||||
|
),
|
||||||
|
]
|
47
peeringdb_server/migrations/0002_partnernship_model.py
Normal file
47
peeringdb_server/migrations/0002_partnernship_model.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Generated by Django 1.11.4 on 2017-08-21 05:08
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('peeringdb_server', '0001_initial'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Partnership',
|
||||||
|
fields=[
|
||||||
|
('id',
|
||||||
|
models.AutoField(auto_created=True, primary_key=True,
|
||||||
|
serialize=False, verbose_name='ID')),
|
||||||
|
('level',
|
||||||
|
models.PositiveIntegerField(
|
||||||
|
choices=[(1, 'Data Validation Partner'),
|
||||||
|
(2, 'RIR Partner')], default=1)),
|
||||||
|
('url',
|
||||||
|
models.URLField(
|
||||||
|
blank=True, help_text=
|
||||||
|
'If specified clicking the partnership will take the user to this location',
|
||||||
|
null=True, verbose_name='URL')),
|
||||||
|
('logo',
|
||||||
|
models.FileField(
|
||||||
|
blank=True, help_text=
|
||||||
|
b'Allows you to upload and set a logo image file for this partnership',
|
||||||
|
null=True, upload_to=b'logos/')),
|
||||||
|
('org',
|
||||||
|
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name='partnerships',
|
||||||
|
to='peeringdb_server.Organization')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'peeringdb_partnership',
|
||||||
|
'verbose_name': 'Partnership',
|
||||||
|
'verbose_name_plural': 'Partnerships',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
@@ -0,0 +1,49 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Generated by Django 1.11.4 on 2017-08-23 11:58
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('peeringdb_server', '0002_partnernship_model'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='facility',
|
||||||
|
name='lat',
|
||||||
|
field=models.DecimalField(blank=True, decimal_places=6,
|
||||||
|
help_text=b'Latitude', max_digits=9,
|
||||||
|
null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='facility',
|
||||||
|
name='lon',
|
||||||
|
field=models.DecimalField(blank=True, decimal_places=6,
|
||||||
|
help_text=b'Longitude', max_digits=9,
|
||||||
|
null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='organization',
|
||||||
|
name='lat',
|
||||||
|
field=models.DecimalField(blank=True, decimal_places=6,
|
||||||
|
help_text=b'Latitude', max_digits=9,
|
||||||
|
null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='organization',
|
||||||
|
name='lon',
|
||||||
|
field=models.DecimalField(blank=True, decimal_places=6,
|
||||||
|
help_text=b'Longitude', max_digits=9,
|
||||||
|
null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='partnership',
|
||||||
|
name='level',
|
||||||
|
field=models.PositiveIntegerField(choices=[(1, 'Data Validation'),
|
||||||
|
(2, 'RIR')], default=1),
|
||||||
|
),
|
||||||
|
]
|
38
peeringdb_server/migrations/0004_geocode_fields.py
Normal file
38
peeringdb_server/migrations/0004_geocode_fields.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Generated by Django 1.11.4 on 2017-08-24 15:47
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('peeringdb_server', '0003_add_lat_lon_to_address_models'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='facility',
|
||||||
|
name='geocode_date',
|
||||||
|
field=models.DateTimeField(
|
||||||
|
blank=True, help_text=b'Last time of attempted geocode',
|
||||||
|
null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='facility',
|
||||||
|
name='geocode_error',
|
||||||
|
field=models.TextField(
|
||||||
|
blank=True,
|
||||||
|
help_text=b'Error message of previous geocode attempt',
|
||||||
|
null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='facility',
|
||||||
|
name='geocode_status',
|
||||||
|
field=models.BooleanField(
|
||||||
|
default=False, help_text=
|
||||||
|
b"Has this object's latitude and longitude been syncronized to it's address fields"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
35
peeringdb_server/migrations/0005_lat_lon_field_rename.py
Normal file
35
peeringdb_server/migrations/0005_lat_lon_field_rename.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Generated by Django 1.11.4 on 2017-08-30 15:27
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('peeringdb_server', '0004_geocode_fields'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name='facility',
|
||||||
|
old_name='lat',
|
||||||
|
new_name='latitude',
|
||||||
|
),
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name='facility',
|
||||||
|
old_name='lon',
|
||||||
|
new_name='longitude',
|
||||||
|
),
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name='organization',
|
||||||
|
old_name='lat',
|
||||||
|
new_name='latitude',
|
||||||
|
),
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name='organization',
|
||||||
|
old_name='lon',
|
||||||
|
new_name='longitude',
|
||||||
|
),
|
||||||
|
]
|
23
peeringdb_server/migrations/0006_network_allow_ixp_update.py
Normal file
23
peeringdb_server/migrations/0006_network_allow_ixp_update.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Generated by Django 1.11.4 on 2017-10-18 05:45
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('peeringdb_server', '0005_lat_lon_field_rename'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='network',
|
||||||
|
name='allow_ixp_update',
|
||||||
|
field=models.BooleanField(
|
||||||
|
default=False, help_text=
|
||||||
|
b'Sepcifies whether an ixp is allowed to add a netixlan entry for this network via their ixp_member data'
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user