diff --git a/.env b/.env.template
similarity index 96%
rename from .env
rename to .env.template
index bf1b6a3ba..0aded8a1b 100644
--- a/.env
+++ b/.env.template
@@ -30,4 +30,4 @@ POSTGRES_HOST=db
 # To make the best use of CONCURRENCY, follow the guide https://rengine.wiki
 # MIN_CONCURRENCY=5
-MAX_CONCURRENCY=30
+MAX_CONCURRENCY=30
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 73c01fdeb..e31155c92 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,7 @@ local_settings.py
 db.sqlite3
 db.sqlite3-journal
 media
+.env

 ### Django.Python Stack ###
 # Byte-compiled / optimized / DLL files
@@ -55,3 +56,5 @@ secret
 /secrets
 get-docker.sh
+
+filter_dns_leak.sh
\ No newline at end of file
diff --git a/db/Dockerfile b/db/Dockerfile
new file mode 100644
index 000000000..6736cb104
--- /dev/null
+++ b/db/Dockerfile
@@ -0,0 +1,22 @@
+FROM postgres:latest
+
+
+RUN apt-get update && \
+    apt-get install -y git postgresql-server-dev-15 libpam0g-dev libreadline-dev build-essential libselinux1-dev libzstd-dev liblz4-dev zlib1g-dev libkrb5-dev && \
+    git clone https://github.com/ossc-db/pg_bulkload && \
+    cd pg_bulkload && \
+    make USE_PGXS=1 && \
+    make USE_PGXS=1 install
+
+# USER postgres
+# WORKDIR "/tmp"
+# RUN export PATH="/usr/lib/postgresql/15/bin/:$PATH" && \
+#     git clone https://github.com/ossc-db/pg_bulkload && \
+#     cd pg_bulkload && \
+#     make installcheck
+
+ENTRYPOINT ["docker-entrypoint.sh"]
+
+EXPOSE 5432
+CMD ["postgres"]
+
diff --git a/db/imports/.placeholder b/db/imports/.placeholder
new file mode 100644
index 000000000..e69de29bb
diff --git a/db/scripts/bulk_insert_domains.sh b/db/scripts/bulk_insert_domains.sh
new file mode 100755
index 000000000..798ead582
--- /dev/null
+++ b/db/scripts/bulk_insert_domains.sh
@@ -0,0 +1,226 @@
+#!/bin/bash
+
+sudo -v &>/dev/null
+
+domain_file=$1
+organization=$2
+web_server=https://localhost
+engine_type=1
+
+tput setaf 2;
+echo "Bulk Domains Insertion"
+nb_domain_to_insert=$(wc -l $domain_file | awk '{print $1}')
+
+tput setaf 6; echo "Found $nb_domain_to_insert domain(s) in $domain_file !"
+
+echo " "
+tput setaf 4; echo "Querying last domain ID inserted ..."
+
+last_domain_id=$(sudo docker-compose exec db psql -t -U rengine -d rengine -c 'select max(id) from public.targetapp_domain;' | awk 'NF==1 {print $1}')
+if [ -z "$last_domain_id" ]
+then
+    last_domain_id=0
+fi
+tmp_domain_id=$last_domain_id
+
+tput setaf 2; echo "Last domain ID inserted = $last_domain_id"
+
+timestamp=$(date +%s)
+data_fname=/imports/domain_insertion_$timestamp.csv
+ldata_fname=./db$data_fname
+log_fname=/imports/domain_insertion_$timestamp.log
+bad_fname=/imports/domain_insertion_$timestamp.bad
+dup_fname=/imports/domain_insertion_$timestamp.dup
+
+echo " "
+tput setaf 4; echo "Generating domain data file at '$ldata_fname'..."
+
+insert_date=$(date)
+touch $ldata_fname
+for domain in $(cat $domain_file)
+do
+    ((last_domain_id=last_domain_id+1))
+    ldomain="${domain,,}"
+    echo "$last_domain_id,$ldomain,,,,$insert_date,$insert_date," | tee -a $ldata_fname >/dev/null
+done
+
+echo " "
+tput setaf 4; echo "Creating pg_bulkload log files ..."
+touch ./db$log_fname && chmod o+w ./db$log_fname
+touch ./db$bad_fname && chmod o+w ./db$bad_fname
+touch ./db$dup_fname && chmod o+w ./db$dup_fname
+
+echo " "
+tput setaf 4; echo "Creating pg_bulkload extension ..."
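+# Register the pg_bulkload extension in the rengine database; stderr is discarded so re-runs are harmless when it already exists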
+sudo docker-compose exec db psql -U rengine -d rengine -c "CREATE EXTENSION pg_bulkload" 2>/dev/null
+
+echo " "
+tput setaf 4; echo "Start domain insertion using pg_bulkload ..."; tput setaf 6;
+sudo docker-compose exec db pg_bulkload \
+    --infile=$data_fname \
+    --output=public.targetapp_domain \
+    --option="WRITER=PARALLEL" \
+    --option="TYPE=CSV" \
+    --option="DELIMITER=," \
+    --option="DUPLICATE_ERRORS=-1" \
+    --option="PARSE_ERRORS=-1" \
+    --option="ON_DUPLICATE_KEEP=NEW" \
+    --option="CHECK_CONSTRAINTS=YES" \
+    -U rengine \
+    -d rengine \
+    --logfile=$log_fname \
+    --parse-badfile=$bad_fname \
+    --duplicate-badfile=$dup_fname
+
+echo " "
+tput setaf 5; echo "Result log file available at './db$log_fname'"
+tput setaf 5; echo "Bad records that cannot be parsed correctly available at './db$bad_fname'"
+tput setaf 5; echo "Bad records that conflict with unique constraints available at './db$dup_fname'"
+
+echo " "
+tput setaf 4; echo "Creating organization '$organization'..."
+organization_id=$(sudo docker-compose exec db psql -t -U rengine -d rengine -c "insert into public.targetapp_organization(name, insert_date) values('$organization', now()) on conflict (name) do update set id=public.targetapp_organization.id, description=excluded.description returning id;" | awk 'NF==1 {print $1}')
+
+tput setaf 6; echo "$organization created with ID = $organization_id !"
+
+
+end_domain_id=$(sudo docker-compose exec db psql -t -U rengine -d rengine -c 'select max(id) from public.targetapp_domain;' | awk 'NF==1 {print $1}')
+if [ $end_domain_id -eq $tmp_domain_id ]
+then
+    tput setaf 1; echo "No new domain imported, exiting ..."
+    exit
+fi
+
+echo " "
+tput setaf 4; echo "Querying last Organization <-> Domain relation id inserted ..."
+
+last_relation_id=$(sudo docker-compose exec db psql -t -U rengine -d rengine -c "select max(id) from public.targetapp_organization_domains;" | awk 'NF==1 {print $1}')
+if [ -z "$last_relation_id" ]
+then
+    last_relation_id=0
+fi
+
+tput setaf 2; echo "Last Organization <-> Domain relation id inserted = $last_relation_id"
+
+timestamp=$(date +%s)
+data_fname=/imports/relation_insertion_$timestamp.csv
+ldata_fname=./db$data_fname
+log_fname=/imports/relation_insertion_$timestamp.log
+bad_fname=/imports/relation_insertion_$timestamp.bad
+dup_fname=/imports/relation_insertion_$timestamp.dup
+
+echo " "
+tput setaf 4; echo "Generating relation data file at '$ldata_fname'..."
+
+touch $ldata_fname
+last_domain_id=$(($tmp_domain_id+1))
+for domain_id in $(seq $last_domain_id $end_domain_id)
+do
+    ((last_relation_id=last_relation_id+1))
+    echo "$last_relation_id,$organization_id,$domain_id" | tee -a $ldata_fname >/dev/null
+done
+
+echo " "
+tput setaf 4; echo "Creating pg_bulkload log files ..."
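+# pg_bulkload expects its log/bad/duplicate files to exist and be writable from inside the db container (./db/imports is mounted at /imports)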
+touch ./db$log_fname && chmod o+w ./db$log_fname
+touch ./db$bad_fname && chmod o+w ./db$bad_fname
+touch ./db$dup_fname && chmod o+w ./db$dup_fname
+
+echo " "
+tput setaf 4; echo "Start relation insertion using pg_bulkload ..."; tput setaf 6;
+sudo docker-compose exec db pg_bulkload \
+    --infile=$data_fname \
+    --output=public.targetapp_organization_domains \
+    --option="WRITER=PARALLEL" \
+    --option="TYPE=CSV" \
+    --option="DELIMITER=," \
+    --option="DUPLICATE_ERRORS=-1" \
+    --option="PARSE_ERRORS=-1" \
+    --option="ON_DUPLICATE_KEEP=NEW" \
+    --option="CHECK_CONSTRAINTS=YES" \
+    -U rengine \
+    -d rengine \
+    --logfile=$log_fname \
+    --parse-badfile=$bad_fname \
+    --duplicate-badfile=$dup_fname
+
+echo " "
+tput setaf 5; echo "Result log file available at './db$log_fname'"
+tput setaf 5; echo "Bad records that cannot be parsed correctly available at './db$bad_fname'"
+tput setaf 5; echo "Bad records that conflict with unique constraints available at './db$dup_fname'"
+
+
+echo " "
+tput setaf 4; echo "Querying last scan history id inserted ..."
+
+last_scanhistory_id=$(sudo docker-compose exec db psql -t -U rengine -d rengine -c "select max(id) from public.startscan_scanhistory;" | awk 'NF==1 {print $1}')
+if [ -z "$last_scanhistory_id" ]
+then
+    last_scanhistory_id=0
+fi
+tmp_scanhistory_id=$last_scanhistory_id
+
+tput setaf 2; echo "Last scan history id inserted = $last_scanhistory_id"
+
+timestamp=$(date +%s)
+data_fname=/imports/scanhistory_insertion_$timestamp.csv
+ldata_fname=./db$data_fname
+log_fname=/imports/scanhistory_insertion_$timestamp.log
+bad_fname=/imports/scanhistory_insertion_$timestamp.bad
+dup_fname=/imports/scanhistory_insertion_$timestamp.dup
+
+echo " "
+tput setaf 4; echo "Generating scan history data file at '$ldata_fname'..."
+
+touch $ldata_fname
+last_domain_id=$(($tmp_domain_id+1))
+for domain_id in $(seq $last_domain_id $end_domain_id)
+do
+    ((last_scanhistory_id=last_scanhistory_id+1))
+    echo "$last_scanhistory_id,$insert_date,-1,'','',False,False,False,False,False,False,True,,True,$domain_id,$engine_type,," | tee -a $ldata_fname >/dev/null
+done
+
+echo " "
+tput setaf 4; echo "Creating pg_bulkload log files ..."
+touch ./db$log_fname && chmod o+w ./db$log_fname
+touch ./db$bad_fname && chmod o+w ./db$bad_fname
+touch ./db$dup_fname && chmod o+w ./db$dup_fname
+
+echo " "
+tput setaf 4; echo "Start scan history insertion using pg_bulkload ..."; tput setaf 6;
+sudo docker-compose exec db pg_bulkload \
+    --infile=$data_fname \
+    --output=public.startscan_scanhistory \
+    --option="WRITER=PARALLEL" \
+    --option="TYPE=CSV" \
+    --option="DELIMITER=," \
+    --option="DUPLICATE_ERRORS=-1" \
+    --option="PARSE_ERRORS=-1" \
+    --option="ON_DUPLICATE_KEEP=NEW" \
+    --option="CHECK_CONSTRAINTS=YES" \
+    -U rengine \
+    -d rengine \
+    --logfile=$log_fname \
+    --parse-badfile=$bad_fname \
+    --duplicate-badfile=$dup_fname
+
+echo " "
+tput setaf 5; echo "Result log file available at './db$log_fname'"
+tput setaf 5; echo "Bad records that cannot be parsed correctly available at './db$bad_fname'"
+tput setaf 5; echo "Bad records that conflict with unique constraints available at './db$dup_fname'"
+
+echo " "
+tput setaf 4; echo "Start scanning tasks ..."
+
+touch $ldata_fname
+last_domain_id=$(($tmp_domain_id+1))
+last_scanhistory_id=$tmp_scanhistory_id
+for domain_id in $(seq $last_domain_id $end_domain_id)
+do
+    ((last_scanhistory_id=last_scanhistory_id+1))
+    tput setaf 4; echo "Starting scan on domain id = $domain_id ..."
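+    # Invoke reNgine's initiate_scan Celery task for this domain via the redis broker, then record the returned task id on the matching scan history row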
+    celery_id=$(sudo docker-compose exec celery celery -A reNgine -b redis://redis:6379/0 --result-backend redis://redis:6379/0 call reNgine.tasks.initiate_scan -a ["$domain_id","$last_scanhistory_id",0,"$engine_type"])
+    tput setaf 4; echo "Update scan history with celery task id ('$celery_id')"
+    sudo docker-compose exec db psql -t -U rengine -d rengine -c "update public.startscan_scanhistory set celery_id ='$celery_id' where id = $last_scanhistory_id;" &>/dev/null
+done
+
diff --git a/docker-compose.yml b/docker-compose.yml
index 730ed427f..df8039ecc 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -3,16 +3,18 @@ version: '3.8'
 services:
   db:
     restart: always
-    image: "postgres:12.3-alpine"
+    build:
+      context: ./db
     environment:
       - POSTGRES_DB=${POSTGRES_DB}
       - POSTGRES_USER=${POSTGRES_USER}
       - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
       - POSTGRES_PORT=${POSTGRES_PORT}
-    ports:
-      - "5432:5432"
+    # ports:
+    #   - "5432:5432"
     volumes:
       - postgres_data:/var/lib/postgresql/data/
+      - ./db/imports:/imports/
     networks:
      - rengine_network
@@ -105,8 +107,8 @@ services:
       - nuclei_templates:/root/nuclei-templates
       - tool_config:/root/.config
       - static_volume:/usr/src/app/staticfiles/
-    ports:
-      - "8000:8000"
+    # ports:
+    #   - "8000:8000"
     depends_on:
       - db
       - celery
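With the db image now built from ./db and ./db/imports mounted at /imports, a typical bulk-import run might look like the following, executed from the repository root since the script writes to ./db/imports (a sketch only; the domain file and organization name are hypothetical examples):

    sudo docker-compose build db
    sudo docker-compose up -d
    ./db/scripts/bulk_insert_domains.sh domains.txt "Example Org"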
diff --git a/web/scanEngine/models.py b/web/scanEngine/models.py
index 5e8276d0d..d4f0c079a 100644
--- a/web/scanEngine/models.py
+++ b/web/scanEngine/models.py
@@ -14,6 +14,9 @@ class EngineType(models.Model):
     yaml_configuration = models.TextField()
     default_engine = models.BooleanField(null=True, default=False)

+    class Meta:
+        db_table = "scanengine_enginetype"
+
     def __str__(self):
         return self.engine_name

@@ -37,6 +40,9 @@ class Wordlist(models.Model):
     short_name = models.CharField(max_length=50, unique=True)
     count = models.IntegerField(default=0)

+    class Meta:
+        db_table = "scanengine_wordlist"
+
     def __str__(self):
         return self.name

@@ -47,6 +53,9 @@ class Configuration(models.Model):
     short_name = models.CharField(max_length=50, unique=True)
     content = models.TextField()

+    class Meta:
+        db_table = "scanengine_configuration"
+
     def __str__(self):
         return self.name

@@ -59,6 +68,9 @@ class InterestingLookupModel(models.Model):
     url_lookup = models.BooleanField(default=True)
     condition_200_http_lookup = models.BooleanField(default=False)

+    class Meta:
+        db_table = "scanengine_interestinglookupmodel"
+

 class Notification(models.Model):
     id = models.AutoField(primary_key=True)
@@ -78,12 +90,19 @@ class Notification(models.Model):
     send_scan_output_file = models.BooleanField(default=True)

+    class Meta:
+        db_table = "scanengine_notification"
+
+
 class Proxy(models.Model):
     id = models.AutoField(primary_key=True)
     use_proxy = models.BooleanField(default=False)
     proxies = models.TextField(blank=True, null=True)

+    class Meta:
+        db_table = "scanengine_proxy"
+

 class Hackerone(models.Model):
     id = models.AutoField(primary_key=True)
@@ -94,6 +113,9 @@ class Hackerone(models.Model):
     send_medium = models.BooleanField(default=False)
     report_template = models.TextField(blank=True, null=True)

+    class Meta:
+        db_table = "scanengine_hackerone"
+

 class VulnerabilityReportSetting(models.Model):
     id = models.AutoField(primary_key=True)
@@ -109,6 +131,9 @@ class VulnerabilityReportSetting(models.Model):
     show_footer = models.BooleanField(default=False)
     footer_text = models.CharField(max_length=200, null=True, blank=True)

+    class Meta:
+        db_table = "scanengine_vulnerabilityreportsetting"
+

 class InstalledExternalTool(models.Model):
     id = models.AutoField(primary_key=True)
@@ -127,5 +152,8 @@ class InstalledExternalTool(models.Model):
     github_clone_path = models.CharField(max_length=1500, null=True, blank=True)
     subdomain_gathering_command = models.CharField(max_length=300, null=True, blank=True)

+    class Meta:
+        db_table = "scanengine_installedexternaltool"
+
     def __str__(self):
         return self.name
diff --git a/web/startScan/models.py b/web/startScan/models.py
index a17fc5b83..7d9c7fcd2 100644
--- a/web/startScan/models.py
+++ b/web/startScan/models.py
@@ -19,11 +19,8 @@ class ScanHistory(models.Model):
     start_scan_date = models.DateTimeField()
     scan_status = models.IntegerField()
     results_dir = models.CharField(max_length=100, blank=True)
-    domain = models.ForeignKey(Domain, on_delete=models.CASCADE)
-    scan_type = models.ForeignKey(EngineType, on_delete=models.CASCADE)
     celery_id = models.CharField(max_length=100, blank=True)
     subdomain_discovery = models.BooleanField(null=True, default=False)
-    waf_detection = models.BooleanField(null=True, default=False)
     dir_file_fuzz = models.BooleanField(null=True, default=False)
     port_scan = models.BooleanField(null=True, default=False)
     fetch_url = models.BooleanField(null=True, default=False)
@@ -32,12 +29,18 @@ class ScanHistory(models.Model):
     screenshot = models.BooleanField(null=True, default=True)
     stop_scan_date = models.DateTimeField(null=True, blank=True)
     used_gf_patterns = models.CharField(max_length=500, null=True, blank=True)
+    domain = models.ForeignKey(Domain, on_delete=models.CASCADE)
+    scan_type = models.ForeignKey(EngineType, on_delete=models.CASCADE)
     error_message = models.CharField(max_length=300, blank=True, null=True)
+    waf_detection = models.BooleanField(null=True, default=False)
     # osint is directly linked to scan history and not subdomains
     emails = models.ManyToManyField('Email', related_name='emails', blank=True)
     employees = models.ManyToManyField('Employee', related_name='employees', blank=True)
     dorks = models.ManyToManyField('Dork', related_name='dorks', blank=True)

+    class Meta:
+        db_table = "startscan_scanhistory"
+
     def __str__(self):
         # debug purpose remove scan type and id in prod
         return self.domain.name

@@ -170,6 +173,9 @@ class Subdomain(models.Model):
     directories = models.ManyToManyField('DirectoryScan', related_name='directories', blank=True)
     waf = models.ManyToManyField('Waf', related_name='waf', blank=True)

+    class Meta:
+        db_table = "startscan_subdomain"
+
     def __str__(self):
         return str(self.name)

@@ -253,6 +259,9 @@ class SubScan(models.Model):
     error_message = models.CharField(max_length=300, blank=True, null=True)
     engine = models.ForeignKey(EngineType, on_delete=models.CASCADE, blank=True, null=True)

+    class Meta:
+        db_table = "startscan_subscan"
+

     def get_completed_ago(self):
         if self.stop_scan_date:
@@ -304,6 +313,9 @@ class EndPoint(models.Model):
     # used for subscans
     endpoint_subscan_ids = models.ManyToManyField('SubScan', related_name='endpoint_subscan_ids', blank=True)

+    class Meta:
+        db_table = "startscan_endpoint"
+
     def __str__(self):
         return self.http_url

@@ -312,6 +324,9 @@ class VulnerabilityTags(models.Model):
     id = models.AutoField(primary_key=True)
     name = models.CharField(max_length=100)

+    class Meta:
+        db_table = "startscan_vulnerabilitytags"
+
     def __str__(self):
         return self.name

@@ -320,6 +335,9 @@ class VulnerabilityReference(models.Model):
     id = models.AutoField(primary_key=True)
     url = models.CharField(max_length=5000)

+    class Meta:
+        db_table = "startscan_vulnerabilityreference"
+
     def __str__(self):
         return self.url

@@ -328,6 +346,9 @@ class CveId(models.Model):
     id = models.AutoField(primary_key=True)
     name = models.CharField(max_length=100)

+    class Meta:
+        db_table = "startscan_cveid"
+
     def __str__(self):
         return self.name

@@ -336,6 +357,9 @@ class CweId(models.Model):
     id = models.AutoField(primary_key=True)
     name = models.CharField(max_length=100)

+    class Meta:
+        db_table = "startscan_cweid"
+
     def __str__(self):
         return self.name

@@ -383,6 +407,9 @@ class Vulnerability(models.Model):
     # used for subscans
     vuln_subscan_ids = models.ManyToManyField('SubScan', related_name='vuln_subscan_ids', blank=True)

+    class Meta:
+        db_table = "startscan_vulnerability"
+
     def __str__(self):
         return self.name

@@ -398,6 +425,9 @@ class ScanActivity(models.Model):
     status = models.IntegerField()
     error_message = models.CharField(max_length=300, blank=True, null=True)

+    class Meta:
+        db_table = "startscan_scanactivity"
+
     def __str__(self):
         return str(self.title)

@@ -407,6 +437,9 @@ class Waf(models.Model):
     name = models.CharField(max_length=500)
     manufacturer = models.CharField(max_length=500, blank=True, null=True)

+    class Meta:
+        db_table = "startscan_waf"
+
     def __str__(self):
         return str(self.name)

@@ -415,6 +448,9 @@ class Technology(models.Model):
     id = models.AutoField(primary_key=True)
     name = models.CharField(max_length=500, blank=True, null=True)

+    class Meta:
+        db_table = "startscan_technology"
+
     def __str__(self):
         return str(self.name)

@@ -424,6 +460,9 @@ class CountryISO(models.Model):
     iso = models.CharField(max_length=10, blank=True)
     name = models.CharField(max_length=100, blank=True)

+    class Meta:
+        db_table = "startscan_countryiso"
+
     def __str__(self):
         return str(self.name)

@@ -438,6 +477,9 @@ class IpAddress(models.Model):
     # this is used for querying which ip was discovered during subcan
     ip_subscan_ids = models.ManyToManyField('SubScan', related_name='ip_subscan_ids')

+    class Meta:
+        db_table = "startscan_ipaddress"
+
     def __str__(self):
         return str(self.address)

@@ -449,6 +491,9 @@ class Port(models.Model):
     description = models.CharField(max_length=1000, blank=True, null=True)
     is_uncommon = models.BooleanField(default=False)

+    class Meta:
+        db_table = "startscan_port"
+
     def __str__(self):
         return str(self.service_name)

@@ -463,6 +508,9 @@ class DirectoryFile(models.Model):
     url = models.CharField(max_length=5000, blank=True, null=True)
     content_type = models.CharField(max_length=100, blank=True, null=True)

+    class Meta:
+        db_table = "startscan_directoryfile"
+
     def __str__(self):
         return str(self.name)

@@ -475,6 +523,9 @@ class DirectoryScan(models.Model):
     # this is used for querying which ip was discovered during subcan
     dir_subscan_ids = models.ManyToManyField('SubScan', related_name='dir_subscan_ids', blank=True)

+    class Meta:
+        db_table = "startscan_directoryscan"
+

 class MetaFinderDocument(models.Model):
     id = models.AutoField(primary_key=True)
@@ -497,20 +548,32 @@ class MetaFinderDocument(models.Model):
     creation_date = models.CharField(max_length=1000, blank=True, null=True)
     modified_date = models.CharField(max_length=1000, blank=True, null=True)

+    class Meta:
+        db_table = "startscan_metafinderdocument"
+

 class Email(models.Model):
     id = models.AutoField(primary_key=True)
     address = models.CharField(max_length=200, blank=True, null=True)
     password = models.CharField(max_length=200, blank=True, null=True)

+    class Meta:
+        db_table = "startscan_email"
+

 class Employee(models.Model):
     id = models.AutoField(primary_key=True)
     name = models.CharField(max_length=1000, null=True, blank=True)
     designation = models.CharField(max_length=1000, null=True, blank=True)

+    class Meta:
db_table = "startscan_employee" + class Dork(models.Model): id = models.AutoField(primary_key=True) type = models.CharField(max_length=500, null=True, blank=True) description = models.CharField(max_length=1500, null=True, blank=True) url = models.CharField(max_length=10000, null=True, blank=True) + + class Meta: + db_table = "startscan_dork" diff --git a/web/targetApp/models.py b/web/targetApp/models.py index f4a388ce4..4b0e95440 100644 --- a/web/targetApp/models.py +++ b/web/targetApp/models.py @@ -10,6 +10,9 @@ class AssociatedDomain(models.Model): name = models.CharField(max_length=250, null=True, blank=True) # target_id = models.ForeignKey(Domain, on_delete=models.CASCADE, null=True, blank=True) + class Meta: + db_table = "targetapp_associateddomain" + def __str__(self): return self.name @@ -18,6 +21,9 @@ class RelatedTLD(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=250, null=True, blank=True) + class Meta: + db_table = "targetapp_relatedtld" + def __str__(self): return self.name @@ -26,6 +32,9 @@ class NameServers(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=500, null=True, blank=True) + class Meta: + db_table = "targetapp_nameservers" + def __str__(self): return self.name @@ -34,6 +43,9 @@ class DomainRegistrar(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=500) + class Meta: + db_table = "targetapp_domainregistrar" + def __str__(self): return self.name @@ -42,6 +54,9 @@ class DomainRegisterName(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=500) + class Meta: + db_table = "targetapp_domainregistername" + def __str__(self): return self.name @@ -50,6 +65,9 @@ class DomainRegisterOrganization(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=500) + class Meta: + db_table = "targetapp_domainregisterorganization" + def __str__(self): return self.name @@ -58,6 +76,9 @@ class DomainAddress(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=500) + class Meta: + db_table = "targetapp_domainaddress" + def __str__(self): return self.name @@ -66,6 +87,9 @@ class DomainCity(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=100) + class Meta: + db_table = "targetapp_domaincity" + def __str__(self): return self.name @@ -74,6 +98,9 @@ class DomainState(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=50) + class Meta: + db_table = "targetapp_domainstate" + def __str__(self): return self.name @@ -82,6 +109,9 @@ class DomainZipCode(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=20) + class Meta: + db_table = "targetapp_domainzipcode" + def __str__(self): return self.name @@ -90,6 +120,9 @@ class DomainCountry(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=20) + class Meta: + db_table = "targetapp_domaincountry" + def __str__(self): return self.name @@ -98,6 +131,9 @@ class DomainEmail(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=500) + class Meta: + db_table = "targetapp_domainemail" + def __str__(self): return self.name @@ -106,6 +142,9 @@ class DomainPhone(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=100) + class Meta: + db_table = "targetapp_domainphone" + def __str__(self): return self.name @@ 
@@ -114,6 +153,9 @@ class DomainFax(models.Model):
     id = models.AutoField(primary_key=True)
     name = models.CharField(max_length=100)

+    class Meta:
+        db_table = "targetapp_domainfax"
+
     def __str__(self):
         return self.name

@@ -122,6 +164,9 @@ class DomainWhoisStatus(models.Model):
     id = models.AutoField(primary_key=True)
     status = models.CharField(max_length=500)

+    class Meta:
+        db_table = "targetapp_domainwhoisstatus"
+
     def __str__(self):
         return self.status

@@ -130,6 +175,9 @@ class DomainRegistrarID(models.Model):
     id = models.AutoField(primary_key=True)
     name = models.CharField(max_length=500)

+    class Meta:
+        db_table = "targetapp_domainregistrarid"
+
     def __str__(self):
         return self.name

@@ -188,6 +236,9 @@ class DomainInfo(models.Model):
     associated_domains = models.ManyToManyField(AssociatedDomain, blank=True)
     related_tlds = models.ManyToManyField(RelatedTLD, blank=True)
+
+    class Meta:
+        db_table = "targetapp_domaininfo"

     def __str__(self):
         return self.id

@@ -199,6 +250,9 @@ class Organization(models.Model):
     description = models.TextField(blank=True, null=True)
     insert_date = models.DateTimeField()
     domains = models.ManyToManyField('Domain', related_name='domains')
+
+    class Meta:
+        db_table = "targetapp_organization"

     def __str__(self):
         return self.name

@@ -216,6 +270,9 @@ class Domain(models.Model):
     insert_date = models.DateTimeField()
     start_scan_date = models.DateTimeField(null=True)
     domain_info = models.ForeignKey(DomainInfo, on_delete=models.CASCADE, null=True, blank=True)
+
+    class Meta:
+        db_table = "targetapp_domain"

     def get_organization(self):
         return Organization.objects.filter(domains__id=self.id)
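The explicit Meta.db_table names added above keep Django's table names aligned with the tables pg_bulkload writes to (public.targetapp_domain, public.startscan_scanhistory, and so on). One way to sanity-check the mapping, assuming the Django container is the web service from docker-compose.yml (a sketch, not part of the change):

    sudo docker-compose exec web python3 manage.py shell
    >>> from startScan.models import ScanHistory
    >>> ScanHistory._meta.db_table
    'startscan_scanhistory'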