From 84a21f132b28a85cb93d6b243ff5e94c1fa61d53 Mon Sep 17 00:00:00 2001 From: Keegan White Date: Fri, 22 Aug 2025 12:26:51 +0200 Subject: [PATCH 1/7] Enhance Docker configurations and add setup scripts - Updated `docker-compose-dev.yml` to include a sleep command before database migrations and added makemigrations step. - Modified `docker-compose-prod.yml` to use environment variables for Traefik configuration and updated container names for versioning. - Added a new `setup.sh` script for easier environment setup and management. - Updated `README.md` to include prerequisites and setup instructions. - Introduced new JSON example files for RADIUSdesk and smart contracts. - Added management commands for creating users and smart contracts from JSON files. - Implemented TimescaleDB checks and continuous aggregate refresh commands. - Enhanced serializers and views for better data handling and pagination in the API. --- README.md | 13 +- docker-compose-dev.yml | 5 +- docker-compose-prod.yml | 24 +- docs/README.md | 13 + docs/examples/sample_radiusdesk_config.json | 148 ++++++ docs/examples/smart_contracts.json | 20 + docs/examples/users.json | 12 + docs/keycloak/README.md | 63 +++ inethi/api_key/migrations/0001_initial.py | 6 +- .../commands/create_radiusdesk_from_json.py | 342 ++++++++++++ .../create_smart_contracts_from_json.py | 197 +++++++ .../management/commands/create_superuser.py | 57 ++ .../commands/create_users_from_json.py | 277 ++++++++++ inethi/core/migrations/0001_initial.py | 91 +++- inethi/network/admin.py | 5 +- .../management/commands/check_timescaledb.py | 174 ++++++ .../commands/refresh_continuous_aggregates.py | 45 ++ inethi/network/migrations/0001_initial.py | 31 +- .../migrations/0002_auto_20250811_1700.py | 118 +++++ inethi/network/models.py | 24 +- inethi/network/serializers.py | 2 +- inethi/network/views.py | 57 +- inethi/radiusdesk/migrations/0001_initial.py | 12 +- inethi/radiusdesk/serializers.py | 48 ++ inethi/radiusdesk/views.py | 284 
++++++++-- inethi/reward/migrations/0001_initial.py | 10 +- inethi/reward/tasks.py | 8 +- inethi/sample_radiusdesk_config.json | 148 ++++++ inethi/smart_contracts.json | 20 + inethi/smart_contracts/views.py | 105 ++-- inethi/transaction/views.py | 15 +- inethi/users.json | 12 + inethi/utils/crypto.py | 501 +++++++++++------- inethi/utils/radius_desk.py | 35 ++ inethi/wallet/serializers.py | 39 +- inethi/wallet/views.py | 83 ++- requirements.txt | 1 + setup.sh | 420 +++++++++++++++ 38 files changed, 3054 insertions(+), 411 deletions(-) create mode 100644 docs/README.md create mode 100644 docs/examples/sample_radiusdesk_config.json create mode 100644 docs/examples/smart_contracts.json create mode 100644 docs/examples/users.json create mode 100644 docs/keycloak/README.md create mode 100644 inethi/core/management/commands/create_radiusdesk_from_json.py create mode 100644 inethi/core/management/commands/create_smart_contracts_from_json.py create mode 100644 inethi/core/management/commands/create_superuser.py create mode 100644 inethi/core/management/commands/create_users_from_json.py create mode 100644 inethi/network/management/commands/check_timescaledb.py create mode 100644 inethi/network/management/commands/refresh_continuous_aggregates.py create mode 100644 inethi/network/migrations/0002_auto_20250811_1700.py create mode 100644 inethi/sample_radiusdesk_config.json create mode 100644 inethi/smart_contracts.json create mode 100644 inethi/users.json create mode 100755 setup.sh diff --git a/README.md b/README.md index f71514d..adf20e5 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,10 @@ The backend for the bespoke iNethi system. ## Commands +### Prerequisites + +Read the [docs](./docs/README.md) for details of the set up and requirements. + ### Running the Code Look at [.env.example](.env.example) and create a `.env` file with all the variables listed in this file. 
See @@ -55,12 +59,3 @@ print(f"Your encryption key: {encryption_key.decode()}") from django.core.management.utils import get_random_secret_key print(get_random_secret_key()) ``` - -## Contributing - -Pull requests and issues are welcome! Please open an issue to discuss major changes or new features. - -## License - -> [!WARNING] -> Closed-source commercial usage of this code is not permitted with the GPL-3.0. If that license is not compatible with your use case, please contact keeganthomaswhite@gmail.com for queries. diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index cfec3ea..6c1ee47 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -13,7 +13,9 @@ services: volumes: - ./inethi:/inethi command: > - sh -c "python manage.py wait_for_db && + sh -c "sleep 10 && + python manage.py wait_for_db && + python manage.py makemigrations && python manage.py migrate && python manage.py runserver 0.0.0.0:8000" environment: @@ -37,6 +39,7 @@ services: - POSTGRES_DB=${DB_NAME:-devdb} - POSTGRES_USER=${DB_USER:-devuser} - POSTGRES_PASSWORD=${DB_PASS:-devpass} + command: postgres -c shared_preload_libraries=timescaledb keycloak: image: quay.io/keycloak/keycloak:25.0.6 diff --git a/docker-compose-prod.yml b/docker-compose-prod.yml index 6a7e97b..f4390a4 100644 --- a/docker-compose-prod.yml +++ b/docker-compose-prod.yml @@ -1,6 +1,6 @@ services: app: - container_name: inethi-backend + container_name: inethi-backend-v1 restart: unless-stopped env_file: - ./.env @@ -25,29 +25,30 @@ services: - db labels: - "traefik.enable=true" - - "traefik.http.routers.backend.rule=Host(`backend.inethicloud.net`)" - - "traefik.http.routers.backend.entrypoints=websecure" + - "traefik.http.routers.backend.rule=Host(`${TRAEFIK_BACKEND_HOST:-backend.inethicloud.net}`)" + - "traefik.http.routers.backend.entrypoints=${TRAEFIK_ENTRYPOINTS:-websecure}" - "traefik.http.services.backend.loadbalancer.server.port=8000" + - 
"traefik.http.routers.backend.tls.certresolver=${TRAEFIK_CERTRESOLVER:-letsencrypt}" db: image: timescale/timescaledb:latest-pg17 restart: unless-stopped - container_name: inethi-backend-db-timescale + container_name: inethi-backend-db-timescale-v1 env_file: - ./.env volumes: - - prod-db-data-timescale:/var/lib/postgresql/data + - prod-db-data-timescale-v1:/var/lib/postgresql/data environment: - POSTGRES_DB=${DB_NAME:-devdb} - POSTGRES_USER=${DB_USER:-devuser} - POSTGRES_PASSWORD=${DB_PASS:-devpass} labels: - "traefik.enable=true" - - "traefik.http.routers.backenddb.entrypoints=websecure" + - "traefik.http.routers.backenddbv1.entrypoints=${TRAEFIK_ENTRYPOINTS:-websecure}" redis: image: redis:alpine - container_name: inethi-backend-celery-redis + container_name: inethi-backend-celery-redis-v1 celery: build: @@ -61,7 +62,7 @@ services: environment: - CELERY_BROKER=redis://redis:6379/0 - CELERY_BACKEND=redis://redis:6379/0 - container_name: inethi-backend-celery + container_name: inethi-backend-celery-v1 depends_on: - redis - db @@ -76,16 +77,15 @@ services: environment: - CELERY_BROKER=redis://redis:6379/0 - CELERY_BACKEND=redis://redis:6379/0 - container_name: inethi-backend-celery-beat + container_name: inethi-backend-celery-beat-v1 depends_on: - redis - db - networks: default: external: - name: "inethi-bridge-traefik" + name: "${TRAEFIK_NETWORK_BRIDGE:-inethi-bridge-traefik}" volumes: - prod-db-data-timescale: \ No newline at end of file + prod-db-data-timescale-v1: diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..ac7f410 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,13 @@ +# Setting Up The System + +1. Populate a `.env` file. +2. If you don't have keycloak and radiusdesk you need to set these up. The details for Keycloak can be found [here](./keycloak/README.md). +3. Run the compose file of your choosing then run `docker exec inethi-backend python manage.py create_superuser`. +4. 
Create json files and place them in the [inethi dir](../inethi/) for your NETWORK admin user(s), RADIUSDesk instance(s) and smart contract(s) commands, see the [users example](./examples/users.json), [RADIUSDesk example](./examples/sample_radiusdesk_config.json) and the contracts [example](./examples/smart_contracts.json) then run the following: + +``` +docker exec -it inethi-backend sh +python manage.py create_users_from_json user.json +python manage.py create_smart_contracts_from_json smart_contracts.json +python manage.py create_radiusdesk_from_json sample_radiusdesk_config.json +``` diff --git a/docs/examples/sample_radiusdesk_config.json b/docs/examples/sample_radiusdesk_config.json new file mode 100644 index 0000000..d6074e0 --- /dev/null +++ b/docs/examples/sample_radiusdesk_config.json @@ -0,0 +1,148 @@ +[ + { + "name": "Main RADIUSdesk Instance", + "base_url": "https://radiusdesk.example.com", + "username": "admin", + "password": "admin_password", + "accepts_crypto": true, + "administrators": ["devuser", "network_admin"], + "clouds": [ + { + "name": "Main Cloud", + "radius_desk_id": 1, + "realms": [ + { + "name": "Default Realm", + "radius_desk_id": 1, + "profiles": [ + { + "name": "Basic Plan", + "radius_desk_id": 1, + "data_limit_enabled": true, + "data_limit_gb": 5.0, + "data_limit_reset": "monthly", + "speed_limit_enabled": true, + "speed_limit_mbs": 10.0, + "limit_session_enabled": false, + "session_limit": 0, + "cost": 0.0 + }, + { + "name": "Premium Plan", + "radius_desk_id": 2, + "data_limit_enabled": true, + "data_limit_gb": 20.0, + "data_limit_reset": "monthly", + "speed_limit_enabled": true, + "speed_limit_mbs": 50.0, + "limit_session_enabled": false, + "session_limit": 0, + "cost": 10.0 + }, + { + "name": "Unlimited Plan", + "radius_desk_id": 3, + "data_limit_enabled": false, + "data_limit_gb": 0, + "data_limit_reset": "never", + "speed_limit_enabled": true, + "speed_limit_mbs": 100.0, + "limit_session_enabled": true, + "session_limit": 1, + 
"cost": 25.0 + } + ] + }, + { + "name": "Guest Realm", + "radius_desk_id": 2, + "profiles": [ + { + "name": "Guest Access", + "radius_desk_id": 4, + "data_limit_enabled": true, + "data_limit_gb": 1.0, + "data_limit_reset": "daily", + "speed_limit_enabled": true, + "speed_limit_mbs": 5.0, + "limit_session_enabled": true, + "session_limit": 2, + "cost": 0.0 + } + ] + } + ] + }, + { + "name": "Secondary Cloud", + "radius_desk_id": 2, + "realms": [ + { + "name": "Business Realm", + "radius_desk_id": 3, + "profiles": [ + { + "name": "Business Basic", + "radius_desk_id": 5, + "data_limit_enabled": true, + "data_limit_gb": 50.0, + "data_limit_reset": "monthly", + "speed_limit_enabled": true, + "speed_limit_mbs": 25.0, + "limit_session_enabled": false, + "session_limit": 0, + "cost": 15.0 + }, + { + "name": "Business Pro", + "radius_desk_id": 6, + "data_limit_enabled": true, + "data_limit_gb": 100.0, + "data_limit_reset": "monthly", + "speed_limit_enabled": true, + "speed_limit_mbs": 100.0, + "limit_session_enabled": true, + "session_limit": 5, + "cost": 35.0 + } + ] + } + ] + } + ] + }, + { + "name": "Test RADIUSdesk Instance", + "base_url": "https://test-radiusdesk.example.com", + "username": "test_admin", + "password": "test_password", + "accepts_crypto": false, + "administrators": ["devuser"], + "clouds": [ + { + "name": "Test Cloud", + "radius_desk_id": 1, + "realms": [ + { + "name": "Test Realm", + "radius_desk_id": 1, + "profiles": [ + { + "name": "Test Profile", + "radius_desk_id": 1, + "data_limit_enabled": false, + "data_limit_gb": 0, + "data_limit_reset": "never", + "speed_limit_enabled": false, + "speed_limit_mbs": 0, + "limit_session_enabled": false, + "session_limit": 0, + "cost": 0.0 + } + ] + } + ] + } + ] + } +] diff --git a/docs/examples/smart_contracts.json b/docs/examples/smart_contracts.json new file mode 100644 index 0000000..0d97630 --- /dev/null +++ b/docs/examples/smart_contracts.json @@ -0,0 +1,20 @@ +[ + { + "name": "iNethi Registry", + 
"address": "0x9b1a3F8FadD0dc86FBae5Cf66Fa682fDcd84a9b0", + "contract_type": "account index", + "description": "iNethi account registry for celo eth faucet", + "write_access": false, + "read_access": false, + "user_name": "inethiofficial" + }, + { + "name": "iNethi Faucet", + "address": "0xB821E49ADB53F0AbeD834278d5dFc57901c30Eea", + "contract_type": "eth faucet", + "description": "iNethi faucet for celo", + "write_access": false, + "read_access": false, + "user_name": "inethiofficial" + } +] diff --git a/docs/examples/users.json b/docs/examples/users.json new file mode 100644 index 0000000..acbe81a --- /dev/null +++ b/docs/examples/users.json @@ -0,0 +1,12 @@ +[ + { + "username": "network_admin_1", + "password": "password1", + "email": "network_admin_1@inethi.com" + }, + { + "username": "network_admin_2", + "password": "password2", + "email": "network_admin_2@inethi.com" + } +] diff --git a/docs/keycloak/README.md b/docs/keycloak/README.md new file mode 100644 index 0000000..ef66212 --- /dev/null +++ b/docs/keycloak/README.md @@ -0,0 +1,63 @@ +# Keycloak Integration with iNethi Backend + +## Set Up + +To get auth working for the UI and App you need to set up your keycloak instance. Navigate to your keycloak URL. If you use the dev compose file then navigate to [http://localhost:8080/](http://localhost:8080/). + +### Configuration + +These steps will use the following env variables. 
Repalce them with your own where neccessary: + +``` +KEYCLOAK_MASTER_ADMIN="devuser" +KEYCLOAK_MASTER_ADMIN_PASSWORD="devpass" +KEYCLOAK_ADMIN="inethi" +KEYCLOAK_ADMIN_PASSWORD="iNethi2023#" +KEYCLOAK_URL="http://192.168.0.1:8080" +KEYCLOAK_REALM="inethi-services" +KEYCLOAK_BACKEND_CLIENT_ID="inethi-backend-client" +KEYCLOAK_CLIENT_SECRET="oq3BKpcKtiVyNXatzaelqW2QV2zji7YG" +``` + +These will be used in the [settings.py file](../../inethi/inethi/settings.py) to generate the `KEYCLOAK_OPENID`: + +``` +KEYCLOAK_OPENID = KeycloakOpenID( + server_url=env("KEYCLOAK_URL"), + client_id=env("KEYCLOAK_BACKEND_CLIENT_ID"), + realm_name=env("KEYCLOAK_REALM"), + client_secret_key=env("KEYCLOAK_CLIENT_SECRET"), +) +``` + +1. Create a new realm called `inethi-services`. + +2. Ensure you are in this realm and then create a user with the username of `inethi` and password of `iNethi2023#`. Ensure email verification is set to on. + +3. Navigate to `Role mapping` and assign the inethi user these roles: + +- `realm-management` create-client +- `realm-management` manage-clients +- `realm-management` view-clients +- `realm-management` manage-users + +4. Navigate to clients and create a client with the following details: + +- Type: `OpenID Connect` +- Client ID: `inethi-backend-client` +- Name: `inethi-backend-client` +- Client authentication: `on` +- Authorization: `on` +- AUthentication flow: `standard flow, direct access grants, service account roles` +- Home URL: (enter your backend's URL): `http://localhost:8000` +- Valid redirect URIs: `http://localhost:8000/*` +- Valid post logout redirect URIs: `+` +- Web origins: `+` + +5. Navigate to Credentials on the client and copy the `Client Secret` into your `.env` file and replace the default value for `KEYCLOAK_CLIENT_SECRET`. 
Then restart your backend docker containers: + +``` +docker compose -f docker-compose-dev.yml down +docker compose -f docker-compose-dev.yml build +docker compose -f docker-compose-dev.yml up -d +``` diff --git a/inethi/api_key/migrations/0001_initial.py b/inethi/api_key/migrations/0001_initial.py index 2c9b8a3..17412e6 100644 --- a/inethi/api_key/migrations/0001_initial.py +++ b/inethi/api_key/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 5.1 on 2025-02-03 09:30 +# Generated by Django 5.1 on 2025-08-08 14:01 import django.db.models.deletion from django.conf import settings @@ -18,9 +18,9 @@ class Migration(migrations.Migration): name='APIKey', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('key', models.CharField(db_index=True, max_length=255, unique=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), + ('key', models.CharField(blank=True, max_length=255, unique=True)), ('is_active', models.BooleanField(default=True)), + ('created_at', models.DateTimeField(auto_now_add=True)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='api_keys', to=settings.AUTH_USER_MODEL)), ], ), diff --git a/inethi/core/management/commands/create_radiusdesk_from_json.py b/inethi/core/management/commands/create_radiusdesk_from_json.py new file mode 100644 index 0000000..0b0a8fb --- /dev/null +++ b/inethi/core/management/commands/create_radiusdesk_from_json.py @@ -0,0 +1,342 @@ +""" +Django command to create RADIUSdesk instances, clouds, realms, and profiles from a JSON file. 
+""" +import json +import os +import logging +from django.core.management.base import BaseCommand, CommandError +from django.contrib.auth import get_user_model +from radiusdesk.models import ( + RadiusDeskInstance, + Cloud, + Realm, + RadiusDeskProfile +) + +User = get_user_model() +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + """Django command to create RADIUSdesk instances, clouds, realms, and profiles from JSON file.""" + + help = 'Creates RADIUSdesk instances, clouds, realms, and profiles from JSON file' + + def add_arguments(self, parser): + """Add command arguments.""" + parser.add_argument( + 'json_file', + type=str, + help='Path to JSON file containing RADIUSdesk configuration data' + ) + parser.add_argument( + '--dry-run', + action='store_true', + help='Show what would be created without actually creating anything' + ) + + def validate_instance_data(self, instance_data, index): + """Validate RADIUSdesk instance data.""" + required_fields = ['name', 'base_url', 'username', 'password'] + missing_fields = [field for field in required_fields if field not in instance_data] + + if missing_fields: + raise CommandError( + f'Instance {index+1}: Missing required fields: {", ".join(missing_fields)}' + ) + + # Validate administrators if provided + administrators = instance_data.get('administrators', []) + if administrators: + for admin_username in administrators: + try: + User.objects.get(username=admin_username) + except User.DoesNotExist: + raise CommandError( + f'Instance {index+1}: Administrator user "{admin_username}" does not exist' + ) + + def validate_cloud_data(self, cloud_data, index): + """Validate cloud data.""" + required_fields = ['name', 'radius_desk_id'] + missing_fields = [field for field in required_fields if field not in cloud_data] + + if missing_fields: + raise CommandError( + f'Cloud {index+1}: Missing required fields: {", ".join(missing_fields)}' + ) + + def validate_realm_data(self, realm_data, index): + """Validate 
realm data.""" + required_fields = ['name', 'radius_desk_id'] + missing_fields = [field for field in required_fields if field not in realm_data] + + if missing_fields: + raise CommandError( + f'Realm {index+1}: Missing required fields: {", ".join(missing_fields)}' + ) + + def validate_profile_data(self, profile_data, index): + """Validate profile data.""" + required_fields = ['name', 'radius_desk_id'] + missing_fields = [field for field in required_fields if field not in profile_data] + + if missing_fields: + raise CommandError( + f'Profile {index+1}: Missing required fields: {", ".join(missing_fields)}' + ) + + def handle(self, *args, **options): + """Entry point for Django management command.""" + json_file = options['json_file'] + dry_run = options['dry_run'] + + # Check if file exists + if not os.path.exists(json_file): + raise CommandError(f'JSON file "{json_file}" does not exist.') + + # Read and parse JSON file + try: + with open(json_file, 'r') as f: + config_data = json.load(f) + except json.JSONDecodeError as e: + raise CommandError(f'Invalid JSON format: {str(e)}') + except Exception as e: + raise CommandError(f'Error reading JSON file: {str(e)}') + + # Validate JSON structure + if not isinstance(config_data, list): + raise CommandError( + 'JSON file must contain a list of RADIUSdesk instance configurations.' 
+ ) + + if dry_run: + self.stdout.write( + self.style.WARNING('DRY RUN MODE - No objects will be created') + ) + + created_objects = [] + errors = [] + + for i, instance_config in enumerate(config_data): + try: + # Validate instance data + self.validate_instance_data(instance_config, i) + + instance_name = instance_config['name'] + base_url = instance_config['base_url'] + username = instance_config['username'] + password = instance_config['password'] + accepts_crypto = instance_config.get('accepts_crypto', False) + administrators = instance_config.get('administrators', []) + + if dry_run: + self.stdout.write(f'Would create instance: {instance_name}') + created_objects.append({ + 'type': 'instance', + 'name': instance_name, + 'status': 'would_create' + }) + else: + # Create or get RADIUSdesk instance + instance, created = RadiusDeskInstance.objects.get_or_create( + name=instance_name, + defaults={ + 'base_url': base_url, + 'username': username, + 'password': password, + 'accepts_crypto': accepts_crypto, + } + ) + + if created: + self.stdout.write(f'Created RADIUSdesk instance: {instance_name}') + else: + self.stdout.write(f'RADIUSdesk instance already exists: {instance_name}') + + # Add administrators + for admin_username in administrators: + try: + admin_user = User.objects.get(username=admin_username) + instance.administrators.add(admin_user) + self.stdout.write( + f'Added administrator {admin_username} to instance {instance_name}' + ) + except User.DoesNotExist: + errors.append( + f'Instance {instance_name}: Administrator user "{admin_username}" does not exist' + ) + + # Process clouds + clouds_data = instance_config.get('clouds', []) + for j, cloud_data in enumerate(clouds_data): + try: + self.validate_cloud_data(cloud_data, j) + + cloud_name = cloud_data['name'] + cloud_radius_desk_id = cloud_data['radius_desk_id'] + + if dry_run: + self.stdout.write(f'Would create cloud: {cloud_name}') + created_objects.append({ + 'type': 'cloud', + 'name': cloud_name, + 
'instance': instance_name, + 'status': 'would_create' + }) + else: + # Create cloud in database + cloud, cloud_created = Cloud.objects.get_or_create( + name=cloud_name, + radius_desk_instance=instance, + defaults={'radius_desk_id': cloud_radius_desk_id} + ) + + if cloud_created: + self.stdout.write(f'Created cloud: {cloud_name}') + else: + self.stdout.write(f'Cloud already exists: {cloud_name}') + + # Process realms + realms_data = cloud_data.get('realms', []) + for k, realm_data in enumerate(realms_data): + try: + self.validate_realm_data(realm_data, k) + + realm_name = realm_data['name'] + realm_radius_desk_id = realm_data['radius_desk_id'] + + if dry_run: + self.stdout.write(f'Would create realm: {realm_name}') + created_objects.append({ + 'type': 'realm', + 'name': realm_name, + 'instance': instance_name, + 'cloud': cloud_name, + 'status': 'would_create' + }) + else: + # Create realm in database + realm, realm_created = Realm.objects.get_or_create( + name=realm_name, + cloud=cloud, + radius_desk_instance=instance, + defaults={'radius_desk_id': realm_radius_desk_id} + ) + + if realm_created: + self.stdout.write(f'Created realm: {realm_name}') + else: + self.stdout.write(f'Realm already exists: {realm_name}') + + # Process profiles + profiles_data = realm_data.get('profiles', []) + for profile_idx, profile_data in enumerate(profiles_data): + try: + self.validate_profile_data(profile_data, profile_idx) + + profile_name = profile_data['name'] + profile_radius_desk_id = profile_data['radius_desk_id'] + + if dry_run: + self.stdout.write(f'Would create profile: {profile_name}') + created_objects.append({ + 'type': 'profile', + 'name': profile_name, + 'instance': instance_name, + 'cloud': cloud_name, + 'realm': realm_name, + 'status': 'would_create' + }) + else: + # Create profile in database + profile, profile_created = RadiusDeskProfile.objects.get_or_create( + name=profile_name, + realm=realm, + cloud=cloud, + radius_desk_instance=instance, + defaults={ + 
'radius_desk_id': profile_radius_desk_id, + 'data_limit_enabled': profile_data.get('data_limit_enabled', False), + 'data_limit_gb': profile_data.get('data_limit_gb', 0), + 'data_limit_reset': profile_data.get('data_limit_reset', 'never'), + 'speed_limit_enabled': profile_data.get('speed_limit_enabled', False), + 'speed_limit_mbs': profile_data.get('speed_limit_mbs', 0), + 'limit_session_enabled': profile_data.get('limit_session_enabled', False), + 'session_limit': profile_data.get('session_limit', 0), + 'cost': profile_data.get('cost', 0) + } + ) + + if profile_created: + self.stdout.write(f'Created profile: {profile_name}') + else: + self.stdout.write(f'Profile already exists: {profile_name}') + + created_objects.append({ + 'type': 'profile', + 'name': profile_name, + 'instance': instance_name, + 'cloud': cloud_name, + 'realm': realm_name, + 'status': 'created' if profile_created else 'exists' + }) + + except Exception as e: + errors.append(f'Profile {profile_name}: {str(e)}') + + if not dry_run: + created_objects.append({ + 'type': 'realm', + 'name': realm_name, + 'instance': instance_name, + 'cloud': cloud_name, + 'status': 'created' if realm_created else 'exists' + }) + + except Exception as e: + errors.append(f'Realm {realm_name}: {str(e)}') + + if not dry_run: + created_objects.append({ + 'type': 'cloud', + 'name': cloud_name, + 'instance': instance_name, + 'status': 'created' if cloud_created else 'exists' + }) + + except Exception as e: + errors.append(f'Cloud {cloud_name}: {str(e)}') + + if not dry_run: + created_objects.append({ + 'type': 'instance', + 'name': instance_name, + 'status': 'created' if created else 'exists' + }) + + except Exception as e: + errors.append(f'Instance {i+1}: {str(e)}') + + # Summary + self.stdout.write('\n' + '='*50) + self.stdout.write('SUMMARY') + self.stdout.write('='*50) + + if created_objects: + self.stdout.write(f'\nCreated/Found {len(created_objects)} objects:') + for obj in created_objects: + status_color = 
self.style.SUCCESS if obj['status'] == 'created' else self.style.WARNING + self.stdout.write( + status_color(f" {obj['type'].title()}: {obj['name']} ({obj['status']})") + ) + + if errors: + self.stdout.write(f'\n{len(errors)} errors occurred:') + for error in errors: + self.stdout.write(self.style.ERROR(f" {error}")) + + if not errors and not dry_run: + self.stdout.write(self.style.SUCCESS('\nAll RADIUSdesk objects created successfully!')) + elif dry_run: + self.stdout.write(self.style.WARNING('\nDry run completed. No objects were created.')) diff --git a/inethi/core/management/commands/create_smart_contracts_from_json.py b/inethi/core/management/commands/create_smart_contracts_from_json.py new file mode 100644 index 0000000..cc1dfca --- /dev/null +++ b/inethi/core/management/commands/create_smart_contracts_from_json.py @@ -0,0 +1,197 @@ +""" +Django command to create smart contracts from a JSON file and assign to superuser. +""" +import json +import os +from django.core.management.base import BaseCommand, CommandError +from django.contrib.auth import get_user_model +from django.db import transaction +from core.models import SmartContract, FaucetSmartContract, AccountsIndexContract + +User = get_user_model() + + +class Command(BaseCommand): + """Django command to create smart contracts from JSON file.""" + + help = 'Creates smart contracts from JSON file and assigns to specified users' + + def add_arguments(self, parser): + """Add command arguments.""" + parser.add_argument( + 'json_file', + type=str, + help='Path to JSON file containing smart contract data' + ) + parser.add_argument( + '--dry-run', + action='store_true', + help='Show what would be created without actually creating contracts' + ) + parser.add_argument( + '--default-user', + type=str, + help='Default username to assign contracts to if user_name not specified in JSON (defaults to SUPERUSER_USERNAME from env)' + ) + + def handle(self, *args, **options): + """Entry point for Django management 
command.""" + json_file = options['json_file'] + dry_run = options['dry_run'] + default_user = options['default_user'] + + # Get default user username from env if not provided + if not default_user: + default_user = os.getenv('SUPERUSER_USERNAME') + if not default_user: + raise CommandError('SUPERUSER_USERNAME not found in environment variables.') + + # Get default user + try: + default_user_obj = User.objects.get(username=default_user) + except User.DoesNotExist: + raise CommandError(f'User with username "{default_user}" does not exist.') + + # Check if file exists + if not os.path.exists(json_file): + raise CommandError(f'JSON file "{json_file}" does not exist.') + + # Read and parse JSON file + try: + with open(json_file, 'r') as f: + contracts_data = json.load(f) + except json.JSONDecodeError as e: + raise CommandError(f'Invalid JSON format: {str(e)}') + except Exception as e: + raise CommandError(f'Error reading JSON file: {str(e)}') + + # Validate JSON structure + if not isinstance(contracts_data, list): + raise CommandError('JSON file must contain a list of contract objects.') + + if dry_run: + self.stdout.write(self.style.WARNING('DRY RUN MODE - No contracts will be created')) + + created_contracts = [] + errors = [] + + for i, contract_data in enumerate(contracts_data): + try: + # Validate required fields + required_fields = ['name', 'address', 'contract_type'] + missing_fields = [field for field in required_fields if field not in contract_data] + + if missing_fields: + errors.append(f'Contract {i+1}: Missing required fields: {", ".join(missing_fields)}') + continue + + name = contract_data['name'] + address = contract_data['address'] + contract_type = contract_data['contract_type'] + description = contract_data.get('description', '') + write_access = contract_data.get('write_access', False) + read_access = contract_data.get('read_access', True) + + # Get user to assign contract to + user_name = contract_data.get('user_name', default_user) + try: + user = 
User.objects.get(username=user_name) + except User.DoesNotExist: + errors.append(f'Contract {i+1}: User "{user_name}" does not exist') + continue + + if dry_run: + self.stdout.write( + f'Would create contract: {name} ({contract_type}) at {address} assigned to user: {user_name}' + ) + continue + + # Create contract in transaction + with transaction.atomic(): + # Check if contract already exists + if SmartContract.objects.filter(address=address).exists(): + errors.append(f'Contract {i+1}: Address "{address}" already exists') + continue + + # Create base contract + base_contract = SmartContract.objects.create( + name=name, + address=address, + description=description, + user=user, + write_access=write_access, + read_access=read_access, + contract_type=contract_type + ) + + # Create specific contract type if needed + if contract_type.lower() == 'faucet': + owner_address = contract_data.get('owner_address', '') + if not owner_address: + errors.append(f'Contract {i+1}: Faucet contract requires owner_address') + continue + + FaucetSmartContract.objects.create( + smartcontract_ptr=base_contract, + owner_address=owner_address, + gimme=contract_data.get('gimme', False), + give_to=contract_data.get('give_to', False), + next_balance=contract_data.get('next_balance', False), + next_time=contract_data.get('next_time', False), + registry_address=contract_data.get('registry_address', '') + ) + + elif contract_type.lower() == 'account_index': + owner_address = contract_data.get('owner_address', '') + if not owner_address: + errors.append(f'Contract {i+1}: Account index contract requires owner_address') + continue + + AccountsIndexContract.objects.create( + smartcontract_ptr=base_contract, + owner_address=owner_address, + entry=contract_data.get('entry', False), + entry_count=contract_data.get('entry_count', False), + is_active=contract_data.get('is_active', False), + activate=contract_data.get('activate', False), + deactivate=contract_data.get('deactivate', False), + 
add=contract_data.get('add', False), + remove=contract_data.get('remove', False) + ) + + created_contracts.append({ + 'name': name, + 'address': address, + 'contract_type': contract_type, + 'user': user_name + }) + + self.stdout.write( + self.style.SUCCESS( + f'Created contract "{name}" ({contract_type}) at {address} assigned to user: {user_name}' + ) + ) + + except Exception as e: + errors.append(f'Contract {i+1}: {str(e)}') + continue + + # Summary + if dry_run: + self.stdout.write( + self.style.WARNING(f'Would create {len(contracts_data)} contracts') + ) + else: + self.stdout.write( + self.style.SUCCESS(f'Successfully created {len(created_contracts)} contracts') + ) + + if created_contracts: + self.stdout.write('\nCreated contracts:') + for contract in created_contracts: + self.stdout.write(f' - {contract["name"]} ({contract["contract_type"]}) at {contract["address"]} assigned to {contract["user"]}') + + if errors: + self.stdout.write('\nErrors:') + for error in errors: + self.stdout.write(self.style.ERROR(f' - {error}')) diff --git a/inethi/core/management/commands/create_superuser.py b/inethi/core/management/commands/create_superuser.py new file mode 100644 index 0000000..aa7616b --- /dev/null +++ b/inethi/core/management/commands/create_superuser.py @@ -0,0 +1,57 @@ +""" +Django command to create a superuser using environment variables. 
+""" +import os +from django.core.management.base import BaseCommand +from django.contrib.auth import get_user_model +from django.conf import settings + +User = get_user_model() + + +class Command(BaseCommand): + """Django command to create a superuser using environment variables.""" + + help = 'Creates a superuser using SUPERUSER_USERNAME and SUPERUSER_PASSWORD from .env' + + def handle(self, *args, **options): + """Entry point for Django management command.""" + # Get credentials from environment variables + username = os.getenv('SUPERUSER_USERNAME') + password = os.getenv('SUPERUSER_PASSWORD') + + if not username: + self.stdout.write( + self.style.ERROR('SUPERUSER_USERNAME not found in environment variables.') + ) + return + + if not password: + self.stdout.write( + self.style.ERROR('SUPERUSER_PASSWORD not found in environment variables.') + ) + return + + # Check if superuser already exists + if User.objects.filter(username=username).exists(): + self.stdout.write( + self.style.WARNING(f'Superuser with username "{username}" already exists.') + ) + return + + # Create superuser + try: + user = User.objects.create_superuser( + email=f"{username}@inethi.com", + username=username, + password=password + ) + self.stdout.write( + self.style.SUCCESS( + f'Successfully created superuser "{username}" with email "{user.email}"' + ) + ) + except Exception as e: + self.stdout.write( + self.style.ERROR(f'Failed to create superuser: {str(e)}') + ) diff --git a/inethi/core/management/commands/create_users_from_json.py b/inethi/core/management/commands/create_users_from_json.py new file mode 100644 index 0000000..5ab80a3 --- /dev/null +++ b/inethi/core/management/commands/create_users_from_json.py @@ -0,0 +1,277 @@ +""" +Django command to create users from a JSON file with wallets and network admin permissions. 
+""" +import json +import os +import logging +from django.core.management.base import BaseCommand, CommandError +from django.contrib.auth import get_user_model +from django.db import transaction +from django.conf import settings +from core.models import Wallet +from utils.crypto import encrypt_private_key, decrypt_private_key +from utils.crypto import CryptoUtils + +User = get_user_model() +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + """Django command to create users from JSON file with wallets.""" + + help = 'Creates users from JSON file with wallets and network admin permissions' + + def add_arguments(self, parser): + """Add command arguments.""" + parser.add_argument( + 'json_file', + type=str, + help='Path to JSON file containing user data' + ) + parser.add_argument( + '--dry-run', + action='store_true', + help='Show what would be created without actually creating users' + ) + + def create_wallet_for_user(self, user, wallet_name='default'): + """ + Create a wallet for a user using the same logic as the wallet serializer. + Returns the created wallet or None if creation failed. 
+ """ + try: + # Use utility script to create wallet + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + wallet_info = crypto_utils.create_wallet() + + # Ensure wallet_info contains the expected keys + if 'private_key' not in wallet_info or 'address' not in wallet_info: + logger.error("Wallet creation failed: missing keys in response") + return None + + p_key = wallet_info['private_key'] + w_addr = wallet_info['address'] + encrypted_private_key = encrypt_private_key(p_key) + + wallet_data = { + 'user': user, + 'name': wallet_name, + 'private_key': encrypted_private_key, + 'address': w_addr, + 'token_common_name': 'KRONE', + 'token': 'KRONE', + 'token_type': 'ERC-20', + } + logger.info(f"Wallet data: {wallet_data}") + + if settings.FAUCET_AND_INDEX_ENABLED: + try: + # Add the wallet to the account index for Krone + account_index_creator = Wallet.objects.get( + address=settings.ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS + ) + p_key_admin = decrypt_private_key(account_index_creator.private_key) + + # Create new CryptoUtils instance for registry operations + registry_crypto = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + + # Add the wallet to the account index for Krone + registry_crypto.registry_add(p_key_admin, w_addr) + + # send the account gas + faucet_creator = Wallet.objects.get( + address=settings.FAUCET_ADMIN_WALLET_ADDRESS + ) + p_key_faucet = decrypt_private_key(faucet_creator.private_key) + + # Add small delay between transactions to avoid nonce conflicts + import time + time.sleep(1.0) + + # Create new CryptoUtils instance for faucet operations + faucet_crypto = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + 
contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + faucet_crypto.faucet_give_to(p_key_faucet, w_addr) + except Exception as e: + logger.error(f"Error during wallet creation (registry/faucet): {e}") + # Continue with wallet creation even if registry/faucet fails + pass + + # Create the wallet + return Wallet.objects.create(**wallet_data) + + except Exception as e: + logger.error(f"Error creating wallet for user {user.username}: {e}") + return None + + def handle(self, *args, **options): + """Entry point for Django management command.""" + json_file = options['json_file'] + dry_run = options['dry_run'] + + # Check if file exists + if not os.path.exists(json_file): + raise CommandError(f'JSON file "{json_file}" does not exist.') + + # Read and parse JSON file + try: + with open(json_file, 'r') as f: + users_data = json.load(f) + except json.JSONDecodeError as e: + raise CommandError(f'Invalid JSON format: {str(e)}') + except Exception as e: + raise CommandError(f'Error reading JSON file: {str(e)}') + + # Validate JSON structure + if not isinstance(users_data, list): + raise CommandError('JSON file must contain a list of user objects.') + + if dry_run: + self.stdout.write(self.style.WARNING('DRY RUN MODE - No users will be created')) + + created_users = [] + errors = [] + + for i, user_data in enumerate(users_data): + try: + # Validate required fields (wallet_address and private_key are now optional) + required_fields = ['username', 'password', 'email'] + missing_fields = [field for field in required_fields if field not in user_data] + + if missing_fields: + errors.append(f'User {i+1}: Missing required fields: {", ".join(missing_fields)}') + continue + + username = user_data['username'] + password = user_data['password'] + email = user_data['email'] + + # Optional fields + first_name = user_data.get('first_name', '') + last_name = user_data.get('last_name', '') + phone_number = 
user_data.get('phone_number', '') + + # Wallet fields (optional - will be auto-generated if not provided) + wallet_address = user_data.get('wallet_address') + private_key = user_data.get('private_key') + + if dry_run: + if wallet_address: + self.stdout.write( + f'Would create user: {username} ({email}) with existing wallet {wallet_address}' + ) + else: + self.stdout.write( + f'Would create user: {username} ({email}) with auto-generated wallet' + ) + continue + + # Create user and wallet in transaction + with transaction.atomic(): + # Check if user already exists + if User.objects.filter(username=username).exists(): + errors.append(f'User {i+1}: Username "{username}" already exists') + continue + + if User.objects.filter(email=email).exists(): + errors.append(f'User {i+1}: Email "{email}" already exists') + continue + + # Check if wallet address already exists (only if provided) + if wallet_address and Wallet.objects.filter(address=wallet_address).exists(): + errors.append(f'User {i+1}: Wallet address "{wallet_address}" already exists') + continue + + # Create user + user = User.objects.create_user( + email=email, + username=username, + password=password, + first_name=first_name, + last_name=last_name, + phone_number=phone_number + ) + + # Mark as network admin + user.user_permissions.add( + User._meta.get_field('user_permissions').related_model.objects.get( + codename='network_admin' + ) + ) + + # Create wallet + if wallet_address and private_key: + # Use provided wallet details + encrypted_private_key = encrypt_private_key(private_key) + wallet = Wallet.objects.create( + user=user, + name='default', + private_key=encrypted_private_key, + address=wallet_address, + token_common_name='KRONE', + token='KRONE', + token_type='ERC-20' + ) + else: + # Auto-generate wallet + wallet = self.create_wallet_for_user(user) + if not wallet: + errors.append(f'User {i+1}: Failed to create wallet for user "{username}"') + continue + + created_users.append({ + 'username': username, + 
'email': email, + 'wallet_address': wallet.address + }) + + if wallet_address: + self.stdout.write( + self.style.SUCCESS( + f'Created user "{username}" with existing wallet {wallet.address}' + ) + ) + else: + self.stdout.write( + self.style.SUCCESS( + f'Created user "{username}" with auto-generated wallet {wallet.address}' + ) + ) + + except Exception as e: + errors.append(f'User {i+1}: {str(e)}') + continue + + # Summary + if dry_run: + self.stdout.write( + self.style.WARNING(f'Would create {len(users_data)} users') + ) + else: + self.stdout.write( + self.style.SUCCESS(f'Successfully created {len(created_users)} users') + ) + + if created_users: + self.stdout.write('\nCreated users:') + for user in created_users: + self.stdout.write(f' - {user["username"]} ({user["email"]})') + + if errors: + self.stdout.write('\nErrors:') + for error in errors: + self.stdout.write(self.style.ERROR(f' - {error}')) diff --git a/inethi/core/migrations/0001_initial.py b/inethi/core/migrations/0001_initial.py index 09ab8b2..befb34b 100644 --- a/inethi/core/migrations/0001_initial.py +++ b/inethi/core/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 5.1 on 2024-10-08 06:32 +# Generated by Django 5.1 on 2025-08-08 13:58 import django.db.models.deletion from django.conf import settings @@ -14,6 +14,33 @@ class Migration(migrations.Migration): ] operations = [ + migrations.CreateModel( + name='SmartContract', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255)), + ('address', models.CharField(max_length=255, unique=True)), + ('description', models.TextField(blank=True)), + ('write_access', models.BooleanField(default=False)), + ('read_access', models.BooleanField(default=False)), + ('contract_type', models.CharField(max_length=255)), + ], + options={ + 'verbose_name': 'Smart Contract', + 'verbose_name_plural': 'Smart Contracts', + }, + ), + migrations.CreateModel( + 
name='Service', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255)), + ('description', models.TextField(blank=True)), + ('url', models.URLField(unique=True)), + ('type', models.CharField(choices=[('entertainment', 'Entertainment'), ('learning', 'Learning'), ('utility', 'Utility')], default='utility', max_length=50)), + ('paid', models.BooleanField(default=False)), + ], + ), migrations.CreateModel( name='User', fields=[ @@ -25,15 +52,75 @@ class Migration(migrations.Migration): ('username', models.CharField(max_length=255, unique=True)), ('first_name', models.CharField(blank=True, max_length=255, null=True)), ('last_name', models.CharField(blank=True, max_length=255, null=True)), + ('phone_number', models.CharField(blank=True, max_length=15, null=True)), ('is_active', models.BooleanField(default=True)), ('is_staff', models.BooleanField(default=False)), ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.group', verbose_name='groups')), ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.permission', verbose_name='user permissions')), ], options={ - 'abstract': False, + 'permissions': (('network_admin', 'Can administer the network'),), }, ), + migrations.CreateModel( + name='AccountsIndexContract', + fields=[ + ('smartcontract_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.smartcontract')), + ('owner_address', models.CharField(max_length=255)), + ('entry', models.BooleanField(default=False)), + ('entry_count', models.BooleanField(default=False)), + ('is_active', models.BooleanField(default=False)), + ('activate', models.BooleanField(default=False)), + ('deactivate', models.BooleanField(default=False)), + ('add', models.BooleanField(default=False)), + ('remove', models.BooleanField(default=False)), + ], + options={ + 'verbose_name': 'Account Index Smart Contract', + 'verbose_name_plural': 'Account Index Smart Contracts', + }, + bases=('core.smartcontract',), + ), + migrations.CreateModel( + name='FaucetSmartContract', + fields=[ + ('smartcontract_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.smartcontract')), + ('owner_address', models.CharField(max_length=255)), + ('gimme', models.BooleanField(default=False)), + ('give_to', models.BooleanField(default=False)), + ('next_balance', models.BooleanField(default=False)), + ('next_time', models.BooleanField(default=False)), + ('registry_address', models.CharField(blank=True, max_length=255)), + ], + options={ + 'verbose_name': 'Faucet Smart Contract', + 'verbose_name_plural': 
'Faucet Smart Contracts', + }, + bases=('core.smartcontract',), + ), + migrations.AddField( + model_name='smartcontract', + name='user', + field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + ), + migrations.CreateModel( + name='Transaction', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('recipient_address', models.CharField(max_length=256)), + ('sender_address', models.CharField(blank=True, max_length=256, null=True)), + ('amount', models.DecimalField(decimal_places=8, max_digits=18)), + ('transaction_hash', models.CharField(blank=True, max_length=256, null=True)), + ('block_number', models.CharField(blank=True, max_length=256, null=True)), + ('block_hash', models.CharField(blank=True, max_length=256, null=True)), + ('gas_used', models.DecimalField(blank=True, decimal_places=8, max_digits=18, null=True)), + ('category', models.CharField(choices=[('TRANSFER', 'Transfer'), ('INTERNET_COUPON', 'Internet Coupon'), ('REWARD', 'Reward'), ('PAYMENT', 'Payment'), ('OTHER', 'Other')], default='TRANSFER', max_length=50)), + ('timestamp', models.DateTimeField(auto_now_add=True)), + ('token', models.CharField(blank=True, max_length=255, null=True)), + ('recipient', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='received_transactions', to=settings.AUTH_USER_MODEL)), + ('sender', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sent_transactions', to=settings.AUTH_USER_MODEL)), + ], + ), migrations.CreateModel( name='Wallet', fields=[ diff --git a/inethi/network/admin.py b/inethi/network/admin.py index c41c30d..148e4ca 100644 --- a/inethi/network/admin.py +++ b/inethi/network/admin.py @@ -29,5 +29,6 @@ class PingAdmin(admin.ModelAdmin): @admin.register(Network) class NetworkAdmin(admin.ModelAdmin): - list_display = ('name', 'admin', 'created_at') 
- search_fields = ('name', 'admin__username') + list_display = ('name', 'created_by', 'created_at') + search_fields = ('name', 'created_by__username') + filter_horizontal = ('admins',) diff --git a/inethi/network/management/commands/check_timescaledb.py b/inethi/network/management/commands/check_timescaledb.py new file mode 100644 index 0000000..eeef4f3 --- /dev/null +++ b/inethi/network/management/commands/check_timescaledb.py @@ -0,0 +1,174 @@ +from django.core.management.base import BaseCommand +from django.db import connection +from django.utils import timezone + + +class Command(BaseCommand): + help = 'Check and verify TimescaleDB setup and performance' + + def add_arguments(self, parser): + parser.add_argument( + '--check-hypertables', + action='store_true', + help='Check if hypertables are properly configured', + ) + parser.add_argument( + '--check-aggregates', + action='store_true', + help='Check continuous aggregates status', + ) + parser.add_argument( + '--check-indexes', + action='store_true', + help='Check if indexes are properly created', + ) + parser.add_argument( + '--performance-test', + action='store_true', + help='Run performance tests on queries', + ) + parser.add_argument( + '--all', + action='store_true', + help='Run all checks', + ) + + def handle(self, *args, **options): + if options['all'] or options['check_hypertables']: + self.check_hypertables() + + if options['all'] or options['check_aggregates']: + self.check_continuous_aggregates() + + if options['all'] or options['check_indexes']: + self.check_indexes() + + if options['all'] or options['performance_test']: + self.performance_test() + + def check_hypertables(self): + self.stdout.write(self.style.SUCCESS('Checking hypertables...')) + + with connection.cursor() as cursor: + cursor.execute(""" + SELECT + hypertable_name, + num_chunks, + compression_enabled + FROM timescaledb_information.hypertables + WHERE hypertable_name = 'network_ping'; + """) + + results = cursor.fetchall() + if 
results: + for row in results: + self.stdout.write(f"Hypertable: {row[0]}") + self.stdout.write(f" Chunks: {row[1]}") + self.stdout.write(f" Compression enabled: {row[2]}") + else: + self.stdout.write( + self.style.ERROR('No hypertable found for network_ping!') + ) + + def check_continuous_aggregates(self): + self.stdout.write(self.style.SUCCESS('Checking continuous aggregates...')) + + with connection.cursor() as cursor: + cursor.execute(""" + SELECT + view_name, + materialized_only, + compression_enabled + FROM timescaledb_information.continuous_aggregates + WHERE view_name LIKE 'network_ping_aggregate_%'; + """) + + results = cursor.fetchall() + if results: + for row in results: + self.stdout.write(f"Continuous aggregate: {row[0]}") + self.stdout.write(f" Materialized only: {row[1]}") + self.stdout.write(f" Compression enabled: {row[2]}") + else: + self.stdout.write( + self.style.ERROR('No continuous aggregates found!') + ) + + def check_indexes(self): + self.stdout.write(self.style.SUCCESS('Checking indexes...')) + + with connection.cursor() as cursor: + cursor.execute(""" + SELECT + indexname, + indexdef + FROM pg_indexes + WHERE tablename = 'network_ping' + AND indexname LIKE 'idx_network_ping_%'; + """) + + results = cursor.fetchall() + if results: + for row in results: + self.stdout.write(f"Index: {row[0]}") + self.stdout.write(f" Definition: {row[1]}") + else: + self.stdout.write( + self.style.ERROR('No TimescaleDB indexes found!') + ) + + def performance_test(self): + self.stdout.write(self.style.SUCCESS('Running performance tests...')) + + # Test 1: Simple time range query + self.stdout.write('Test 1: Time range query (last 24 hours)') + start_time = timezone.now() + + with connection.cursor() as cursor: + cursor.execute(""" + SELECT COUNT(*) + FROM network_ping + WHERE timestamp >= now() - interval '24 hours'; + """) + count = cursor.fetchone()[0] + + end_time = timezone.now() + duration = (end_time - start_time).total_seconds() + self.stdout.write(f" 
Result: {count} records in {duration:.3f} seconds") + + # Test 2: Aggregation query + self.stdout.write('Test 2: Aggregation query (last hour)') + start_time = timezone.now() + + with connection.cursor() as cursor: + cursor.execute(""" + SELECT + host_id, + AVG(CASE WHEN is_alive THEN 1.0 ELSE 0 END) * 100 as uptime + FROM network_ping + WHERE timestamp >= now() - interval '1 hour' + GROUP BY host_id; + """) + results = cursor.fetchall() + + end_time = timezone.now() + duration = (end_time - start_time).total_seconds() + self.stdout.write(f" Result: {len(results)} hosts in {duration:.3f} seconds") + + # Test 3: Continuous aggregate query + self.stdout.write('Test 3: Continuous aggregate query (15m)') + start_time = timezone.now() + + with connection.cursor() as cursor: + cursor.execute(""" + SELECT COUNT(*) + FROM network_ping_aggregate_15m + WHERE bucket >= now() - interval '24 hours'; + """) + count = cursor.fetchone()[0] + + end_time = timezone.now() + duration = (end_time - start_time).total_seconds() + self.stdout.write( + f" Result: {count} aggregated records in {duration:.3f} seconds" + ) diff --git a/inethi/network/management/commands/refresh_continuous_aggregates.py b/inethi/network/management/commands/refresh_continuous_aggregates.py new file mode 100644 index 0000000..93c0e33 --- /dev/null +++ b/inethi/network/management/commands/refresh_continuous_aggregates.py @@ -0,0 +1,45 @@ +from django.core.management.base import BaseCommand +from django.db import connection + + +class Command(BaseCommand): + help = 'Refresh all continuous aggregates for TimescaleDB' + + def handle(self, *args, **options): + self.stdout.write( + self.style.SUCCESS('Refreshing continuous aggregates...') + ) + + # List of all continuous aggregates + aggregates = [ + 'network_ping_aggregate_15m', + 'network_ping_aggregate_60m', + 'network_ping_aggregate_6h', + 'network_ping_aggregate_12h', + 'network_ping_aggregate_24h', + 'network_ping_aggregate_7d', + 'network_ping_aggregate_30d', + 
'network_ping_aggregate_90d', + 'network_ping_aggregate_365d', + ] + + with connection.cursor() as cursor: + for aggregate in aggregates: + try: + self.stdout.write(f"Refreshing {aggregate}...") + cursor.execute( + f"CALL refresh_continuous_aggregate('{aggregate}', NULL, NULL);" + ) + self.stdout.write( + self.style.SUCCESS(f"✓ {aggregate} refreshed successfully") + ) + except Exception as e: + self.stdout.write( + self.style.WARNING( + f"⚠ Could not refresh {aggregate}: {str(e)}" + ) + ) + + self.stdout.write( + self.style.SUCCESS('Continuous aggregates refresh completed!') + ) diff --git a/inethi/network/migrations/0001_initial.py b/inethi/network/migrations/0001_initial.py index 03513fa..4039459 100644 --- a/inethi/network/migrations/0001_initial.py +++ b/inethi/network/migrations/0001_initial.py @@ -1,7 +1,8 @@ -# Generated by Django 5.1 on 2025-02-10 10:01 +# Generated by Django 5.1 on 2025-08-08 13:59 import django.core.validators import django.db.models.deletion +import timescale.db.models.fields from django.conf import settings from django.db import migrations, models @@ -11,31 +12,53 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ('radiusdesk', '0004_voucher_wallet_address_alter_voucher_user'), + ('radiusdesk', '__first__'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ + migrations.CreateModel( + name='Network', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('admin', models.ForeignKey(help_text='User who administrates this network', on_delete=django.db.models.deletion.CASCADE, related_name='networks', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'unique_together': {('name', 'admin')}, + }, + ), migrations.CreateModel( name='Host', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), ('name', models.CharField(blank=True, max_length=200, null=True)), - ('ip_address', models.GenericIPAddressField(unique=True)), - ('mac_address', models.CharField(blank=True, max_length=17, null=True, unique=True, validators=[django.core.validators.RegexValidator(message='Enter a valid MAC address in format XX:XX:XX:XX:XX:XX.', regex='^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$')])), + ('ip_address', models.GenericIPAddressField()), + ('mac_address', models.CharField(blank=True, max_length=17, null=True, validators=[django.core.validators.RegexValidator(message='Enter a valid MAC address in format XX:XX:XX:XX:XX:XX.', regex='^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$')])), + ('device_type', models.CharField(choices=[('unknown', 'Unknown'), ('dns_server', 'DNS Server'), ('server', 'Server'), ('firewall', 'Firewall'), ('access_point', 'Access Point'), ('switch', 'Switch')], default='unknown', help_text='Select the type of device (DNS Server, Server, Firewall, Access Point, Switch)', max_length=20)), ('cloud', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='hosts', to='radiusdesk.cloud')), ('radiusdesk_instance', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='hosts', to='radiusdesk.radiusdeskinstance')), ('realm', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='hosts', to='radiusdesk.realm')), ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='hosts', to=settings.AUTH_USER_MODEL)), + ('network', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='hosts', to='network.network')), ], + options={ + 'unique_together': {('network', 'ip_address')}, + }, ), migrations.CreateModel( name='Ping', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), + ('time', timescale.db.models.fields.TimescaleDateTimeField(interval='1 day')), ('is_alive', models.BooleanField(default=False)), ('timestamp', models.DateTimeField(auto_now_add=True)), ('host', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ping_results', to='network.host')), + ('network', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='pings', to='network.network')), ], + options={ + 'unique_together': {('timestamp', 'id')}, + }, ), ] diff --git a/inethi/network/migrations/0002_auto_20250811_1700.py b/inethi/network/migrations/0002_auto_20250811_1700.py new file mode 100644 index 0000000..bcd3e6f --- /dev/null +++ b/inethi/network/migrations/0002_auto_20250811_1700.py @@ -0,0 +1,118 @@ +# Generated by Django 5.1 on 2025-08-11 15:00 + +from django.db import migrations, models +import django.db.models.deletion +from django.conf import settings + + +def migrate_admin_to_created_by_and_admins(apps, schema_editor): + """ + Migrate data from the old 'admin' field to the new 'created_by' and 'admins' fields. + The original admin becomes both the creator and the first admin. + """ + Network = apps.get_model('network', 'Network') + + for network in Network.objects.all(): + # Get the old admin value from the database directly + # We need to use raw SQL because the field will be renamed + from django.db import connection + with connection.cursor() as cursor: + cursor.execute( + "SELECT admin_id FROM network_network WHERE id = %s", + [network.id] + ) + result = cursor.fetchone() + if result and result[0]: + old_admin_id = result[0] + # Set created_by to the old admin + network.created_by_id = old_admin_id + network.save() + # Add the old admin to the admins many-to-many field + network.admins.add(old_admin_id) + + +def reverse_migrate_created_by_to_admin(apps, schema_editor): + """ + Reverse migration: set the admin field to the created_by value. 
+ Note: This will lose information about additional admins. + """ + Network = apps.get_model('network', 'Network') + + for network in Network.objects.all(): + if network.created_by: + # We can't directly set the admin field as it's being removed, + # but we can update the database directly + from django.db import connection + with connection.cursor() as cursor: + cursor.execute( + "UPDATE network_network SET admin_id = %s WHERE id = %s", + [network.created_by.id, network.id] + ) + + +class Migration(migrations.Migration): + + dependencies = [ + ('network', '0001_initial'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + # Step 1: Remove the old unique constraint first + migrations.AlterUniqueTogether( + name='network', + unique_together=set(), + ), + + # Step 2: Add the new fields + migrations.AddField( + model_name='network', + name='created_by', + field=models.ForeignKey( + help_text='User who created this network', + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name='created_networks', + to=settings.AUTH_USER_MODEL + ), + ), + migrations.AddField( + model_name='network', + name='admins', + field=models.ManyToManyField( + help_text='Users who can administrate this network', + related_name='administered_networks', + to=settings.AUTH_USER_MODEL + ), + ), + + # Step 3: Migrate the data + migrations.RunPython( + migrate_admin_to_created_by_and_admins, + reverse_migrate_created_by_to_admin, + ), + + # Step 4: Make created_by non-nullable + migrations.AlterField( + model_name='network', + name='created_by', + field=models.ForeignKey( + help_text='User who created this network', + on_delete=django.db.models.deletion.CASCADE, + related_name='created_networks', + to=settings.AUTH_USER_MODEL + ), + ), + + # Step 5: Remove the old admin field + migrations.RemoveField( + model_name='network', + name='admin', + ), + + # Step 6: Add the new unique constraint + migrations.AlterUniqueTogether( + name='network', + 
unique_together={('name', 'created_by')}, + ), + ] diff --git a/inethi/network/models.py b/inethi/network/models.py index 2f06e6b..6616e3f 100644 --- a/inethi/network/models.py +++ b/inethi/network/models.py @@ -1,6 +1,7 @@ from django.db import models from django.core.validators import RegexValidator from django.conf import settings +from timescale.db.models.models import TimescaleModel from radiusdesk.models import ( Cloud, RadiusDeskInstance, @@ -16,11 +17,16 @@ class Network(models.Model): name = models.CharField(max_length=255) - admin = models.ForeignKey( + created_by = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, - related_name="networks", - help_text="User who administrates this network" + related_name="created_networks", + help_text="User who created this network" + ) + admins = models.ManyToManyField( + settings.AUTH_USER_MODEL, + related_name="administered_networks", + help_text="Users who can administrate this network" ) created_at = models.DateTimeField(auto_now_add=True) @@ -28,7 +34,7 @@ def __str__(self): return self.name class Meta: - unique_together = (("name", "admin"),) + unique_together = (("name", "created_by"),) class Host(models.Model): @@ -108,7 +114,7 @@ class Meta: ) -class Ping(models.Model): +class Ping(TimescaleModel): host = models.ForeignKey( Host, on_delete=models.CASCADE, @@ -128,3 +134,11 @@ class Ping(models.Model): def __str__(self): status = "Alive" if self.is_alive else "Down" return f"{self.host} at {self.timestamp}: {status}" + + class Meta: + # TimescaleDB requires the time column to be part of the primary key + unique_together = (('timestamp', 'id'),) + + class TimescaleMeta: + # Tell TimescaleDB to use 'timestamp' as the time column instead of 'time' + time_column = 'timestamp' diff --git a/inethi/network/serializers.py b/inethi/network/serializers.py index 579d33d..9d716d9 100644 --- a/inethi/network/serializers.py +++ b/inethi/network/serializers.py @@ -35,4 +35,4 @@ class 
NetworkSerializer(serializers.ModelSerializer): class Meta: model = Network fields = '__all__' - read_only_fields = ['admin'] + read_only_fields = ['created_by'] diff --git a/inethi/network/views.py b/inethi/network/views.py index 2fccd98..a146932 100644 --- a/inethi/network/views.py +++ b/inethi/network/views.py @@ -50,7 +50,7 @@ def update_host_by_identifier(request): # Look up the network by name and ensure the requesting user is its admin. try: - network = Network.objects.get(name=network_name, admin=request.user) + network = Network.objects.get(name=network_name, admins=request.user) print(network.id) except Network.DoesNotExist: return Response( @@ -119,7 +119,7 @@ def delete_host_by_identifier(request): # Look up the network by name for the current user. try: - network = Network.objects.get(name=network_name, admin=request.user) + network = Network.objects.get(name=network_name, admins=request.user) except Network.DoesNotExist: return Response( {"error": "Network not found or not authorized."}, @@ -165,15 +165,18 @@ def aggregate_ping_view(request): - host_ids (optional): Comma-separated list of host IDs (integers). - aggregation (optional): One of "15m", "60m", "6h", "12h", "24h", "7d", "30d", "90d", "365d". Defaults to "15m". + - time_range (optional): Time range filter (e.g., "24 hours", "7 days"). + Defaults to "24 hours". Example URLs: /api/ping-aggregates/?aggregation=15m - /api/ping-aggregates/?host_ids=1,2,3&aggregation=60m + /api/ping-aggregates/?host_ids=1,2,3&aggregation=60m&time_range=7 days """ # Get query parameters from DRF's request.query_params. host_ids_param = request.query_params.get('host_ids') aggregation_param = request.query_params.get('aggregation', '15m') network_id = request.query_params.get('network_id') + time_range = request.query_params.get('time_range', '24 hours') # Map allowed aggregation values to your materialized view names. 
valid_aggregations = { @@ -199,7 +202,7 @@ def aggregate_ping_view(request): # If network_id is provided, override host_ids. if network_id: try: - network = Network.objects.get(id=network_id, admin=request.user) + network = Network.objects.get(id=network_id, admins=request.user) except Network.DoesNotExist: return Response( {"error": "Network not found or not authorized."}, @@ -219,27 +222,26 @@ def aggregate_ping_view(request): status=status.HTTP_400_BAD_REQUEST ) - # Build the SQL query. + # Build the optimized SQL query with time range filtering + base_query = f""" + SELECT bucket, host_id, uptime_percentage, total_pings + FROM {table_name} + WHERE bucket >= now() - %s::interval + """ + params = [time_range] + if host_ids: # Create a list of placeholders for each host id. placeholders = ','.join(['%s'] * len(host_ids)) - query = ( - f"SELECT bucket, host_id, uptime_percentage, total_pings " - f"FROM {table_name} WHERE host_id IN ({placeholders}) " - f"ORDER BY bucket;" - ) - params = host_ids - else: - query = ( - f"SELECT bucket, host_id, uptime_percentage, total_pings " - f"FROM {table_name} ORDER BY bucket;" - ) - params = [] + base_query += f" AND host_id IN ({placeholders})" + params.extend(host_ids) + + base_query += " ORDER BY bucket DESC, host_id;" # Execute the query using Django's database connection. 
try: with connection.cursor() as cursor: - cursor.execute(query, params) + cursor.execute(base_query, params) columns = [col[0] for col in cursor.description] results = [dict(zip(columns, row)) for row in cursor.fetchall()] except Exception as e: @@ -276,7 +278,7 @@ def aggregate_uptime_view(request): host_ids = [] if network_id: try: - network = Network.objects.get(id=network_id, admin=request.user) + network = Network.objects.get(id=network_id, admins=request.user) except Network.DoesNotExist: return Response( {"error": "Network not found or not authorized."}, @@ -292,7 +294,7 @@ def aggregate_uptime_view(request): status=status.HTTP_400_BAD_REQUEST ) - # Build the SQL query to aggregate data per host over the entire period. + # Build the optimized SQL query using TimescaleDB time_bucket for better performance sql = """ SELECT host_id, @@ -419,7 +421,7 @@ def device_uptime_line_view(request): if network_id: try: - network = Network.objects.get(id=network_id, admin=request.user) + network = Network.objects.get(id=network_id, admins=request.user) except Network.DoesNotExist: return Response( {"error": "Network not found or not authorized."}, @@ -557,7 +559,7 @@ def get_queryset(self): qs = Host.objects.all() else: # For network admins, only show hosts in networks they manage. - qs = Host.objects.filter(network__admin=user) + qs = Host.objects.filter(network__admins=user) # Optionally filter by a network id passed as query parameter. network_id = self.request.query_params.get("network_id") if network_id: @@ -569,7 +571,7 @@ def perform_create(self, serializer): # If a network admin is creating a host, ensure network is one they manage. 
if user.has_perm('core.network_admin') and not user.is_superuser: network = serializer.validated_data.get('network') - if not network or network.admin != user: + if not network or user not in network.admins.all(): raise PermissionDenied("Unauthorized to add hosts to this network.") serializer.save() @@ -599,10 +601,13 @@ def get_queryset(self): user = self.request.user if user.is_superuser or not user.has_perm('core.network_admin'): return Network.objects.all() - return Network.objects.filter(admin=user) + return Network.objects.filter(admins=user) def perform_create(self, serializer): - serializer.save(admin=self.request.user) + serializer.save(created_by=self.request.user) + # Add the creator as the first admin + network = serializer.instance + network.admins.add(self.request.user) @action(detail=True, methods=["get"]) def hosts(self, request, pk=None): @@ -657,7 +662,7 @@ def ingest_uptime_data(request): # network admin (and not a superuser), ensure they manage this network. if request.user.has_perm('core.network_admin') and not request.user.is_superuser: - if network.admin != request.user: + if request.user not in network.admins.all(): return Response( {"error": "You are not authorized to ingest data for this network."}, status=status.HTTP_403_FORBIDDEN diff --git a/inethi/radiusdesk/migrations/0001_initial.py b/inethi/radiusdesk/migrations/0001_initial.py index ec21e8d..623507c 100644 --- a/inethi/radiusdesk/migrations/0001_initial.py +++ b/inethi/radiusdesk/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 5.1 on 2025-01-23 18:37 +# Generated by Django 5.1 on 2025-08-08 13:59 import django.db.models.deletion from django.conf import settings @@ -22,7 +22,9 @@ class Migration(migrations.Migration): ('base_url', models.URLField(help_text='Base URL for the RADIUSdesk instance')), ('username', models.CharField(max_length=255)), ('password', models.CharField(max_length=255)), - ('token', models.CharField(default='', max_length=255)), + ('token', 
models.CharField(blank=True, default='', max_length=255)), + ('accepts_crypto', models.BooleanField(default=False)), + ('administrators', models.ManyToManyField(blank=True, help_text='Users who have network administrator rights for this instance', related_name='admin_radiusdesk_instances', to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( @@ -40,8 +42,8 @@ class Migration(migrations.Migration): ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=255)), ('radius_desk_id', models.IntegerField()), + ('cloud', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='realms', to='radiusdesk.cloud')), ('radius_desk_instance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='realms', to='radiusdesk.radiusdeskinstance')), - ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='realms', to='radiusdesk.cloud')), ], ), migrations.CreateModel( @@ -57,6 +59,7 @@ class Migration(migrations.Migration): ('speed_limit_mbs', models.FloatField(default=0)), ('limit_session_enabled', models.BooleanField(default=False)), ('session_limit', models.IntegerField(default=0)), + ('cost', models.FloatField(default=0)), ('cloud', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='profiles', to='radiusdesk.cloud')), ('radius_desk_instance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='profiles', to='radiusdesk.radiusdeskinstance')), ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='profiles', to='radiusdesk.realm')), @@ -67,11 +70,12 @@ class Migration(migrations.Migration): fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('voucher_code', models.CharField(max_length=255)), + ('wallet_address', models.CharField(blank=True, max_length=255, null=True)), ('created_at', 
models.DateTimeField(auto_now_add=True)), ('cloud', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vouchers', to='radiusdesk.cloud')), ('radius_desk_instance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vouchers', to='radiusdesk.radiusdeskinstance')), ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vouchers', to='radiusdesk.realm')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vouchers', to=settings.AUTH_USER_MODEL)), + ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='vouchers', to=settings.AUTH_USER_MODEL)), ], ), ] diff --git a/inethi/radiusdesk/serializers.py b/inethi/radiusdesk/serializers.py index e52ee29..0cfc585 100644 --- a/inethi/radiusdesk/serializers.py +++ b/inethi/radiusdesk/serializers.py @@ -36,6 +36,54 @@ class VoucherSerializer(serializers.ModelSerializer): radius_desk_instance_name = serializers.CharField( source="radius_desk_instance.name", read_only=True ) + profile_name = serializers.SerializerMethodField() + profile_data_limit_gb = serializers.SerializerMethodField() + profile_data_limit_enabled = serializers.SerializerMethodField() + profile_speed_limit_mbs = serializers.SerializerMethodField() + profile_speed_limit_enabled = serializers.SerializerMethodField() + profile_cost = serializers.SerializerMethodField() + + def get_profile_name(self, obj): + """Get the profile name for this voucher through its realm.""" + profile = obj.realm.profiles.filter( + radius_desk_instance=obj.radius_desk_instance + ).first() + return profile.name if profile else None + + def get_profile_data_limit_gb(self, obj): + """Get the profile data limit in GB.""" + profile = obj.realm.profiles.filter( + radius_desk_instance=obj.radius_desk_instance + ).first() + return profile.data_limit_gb if profile else None + + def get_profile_data_limit_enabled(self, obj): + 
"""Get whether data limit is enabled for the profile.""" + profile = obj.realm.profiles.filter( + radius_desk_instance=obj.radius_desk_instance + ).first() + return profile.data_limit_enabled if profile else False + + def get_profile_speed_limit_mbs(self, obj): + """Get the profile speed limit in MB/s.""" + profile = obj.realm.profiles.filter( + radius_desk_instance=obj.radius_desk_instance + ).first() + return profile.speed_limit_mbs if profile else None + + def get_profile_speed_limit_enabled(self, obj): + """Get whether speed limit is enabled for the profile.""" + profile = obj.realm.profiles.filter( + radius_desk_instance=obj.radius_desk_instance + ).first() + return profile.speed_limit_enabled if profile else False + + def get_profile_cost(self, obj): + """Get the profile cost.""" + profile = obj.realm.profiles.filter( + radius_desk_instance=obj.radius_desk_instance + ).first() + return profile.cost if profile else None class Meta: model = Voucher diff --git a/inethi/radiusdesk/views.py b/inethi/radiusdesk/views.py index d854656..9eedbf0 100644 --- a/inethi/radiusdesk/views.py +++ b/inethi/radiusdesk/views.py @@ -3,7 +3,8 @@ from rest_framework.response import Response from rest_framework.decorators import action from rest_framework.permissions import IsAuthenticated - +from rest_framework.pagination import PageNumberPagination +import logging from decimal import Decimal from .models import ( @@ -24,7 +25,8 @@ check_token, login, create_voucher, - fetch_vouchers + fetch_vouchers, + fetch_voucher_stats ) from utils.keycloak import KeycloakAuthentication from utils.super_user_or_api_key import IsSuperUserOrAPIKeyUser @@ -36,6 +38,15 @@ from core.models import User from core.models import Transaction +logger = logging.getLogger(__name__) + + +class VoucherPagination(PageNumberPagination): + """Custom pagination for voucher endpoints.""" + page_size = 20 + page_size_query_param = 'page_size' + max_page_size = 100 + class 
RadiusDeskInstanceViewSet(viewsets.ModelViewSet): queryset = RadiusDeskInstance.objects.all() @@ -153,6 +164,7 @@ class VoucherViewSet(viewsets.ModelViewSet): serializer_class = VoucherSerializer authentication_classes = [KeycloakOrAPIKeyAuthentication] permission_classes = [IsAuthenticated] + pagination_class = VoucherPagination @action(detail=False, methods=['get']) def voucher_stats(self, request): @@ -229,14 +241,16 @@ def voucher_stats(self, request): def user_vouchers(self, request): """ Retrieve vouchers for the authenticated user, - ordered by the latest first. + ordered by the latest first with pagination. """ user = request.user - vouchers = (Voucher.objects.filter(user=user) - .order_by('-created_at') - ) # Order by created_at descending - serializer = self.get_serializer(vouchers, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) + vouchers = Voucher.objects.filter(user=user).order_by('-created_at') + + # Use pagination + paginator = VoucherPagination() + page = paginator.paginate_queryset(vouchers, request) + serializer = self.get_serializer(page, many=True) + return paginator.get_paginated_response(serializer.data) @action( detail=False, @@ -245,7 +259,7 @@ def user_vouchers(self, request): ) def search_vouchers(self, request): """ - Search vouchers by wallet_address or username. + Search vouchers by wallet_address or username with pagination. At least one of these parameters must be provided. 
""" @@ -287,24 +301,24 @@ def search_vouchers(self, request): status=status.HTTP_400_BAD_REQUEST ) - vouchers = Voucher.objects.all() + # Start with a base queryset and apply filters efficiently + vouchers = Voucher.objects.filter( + radius_desk_instance=radius_desk_instance_pk, + cloud=radius_desk_cloud_pk + ) if wallet_address: - vouchers = vouchers.filter( - wallet_address__icontains=wallet_address, - radius_desk_instance=radius_desk_instance_pk, - cloud=radius_desk_cloud_pk - ) + vouchers = vouchers.filter(wallet_address__icontains=wallet_address) if username: - vouchers = vouchers.filter( - user__username__icontains=username, - radius_desk_instance=radius_desk_instance_pk, - cloud=radius_desk_cloud_pk - ) + vouchers = vouchers.filter(user__username__icontains=username) vouchers = vouchers.order_by('-created_at') - serializer = self.get_serializer(vouchers, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) + + # Use pagination + paginator = VoucherPagination() + page = paginator.paginate_queryset(vouchers, request) + serializer = self.get_serializer(page, many=True) + return paginator.get_paginated_response(serializer.data) @action( detail=False, @@ -375,7 +389,7 @@ def add_voucher(self, request): category = request.data.get('category', 'INTERNET_COUPON') token = request.data.get('token') - voucher = create_voucher( + voucher_response = create_voucher( token=radius_desk_token, base_url=radius_desk_base_url, cloud_id=radius_desk_cloud_db.radius_desk_id, @@ -384,6 +398,16 @@ def add_voucher(self, request): quantity=quantity, ) + # Handle single vs multiple vouchers + if quantity == 1: + # Single voucher - voucher_response is just the voucher code + voucher_codes = [voucher_response] + else: + # Multiple vouchers - extract voucher codes from JSON response + voucher_codes = [voucher['name'] for voucher in voucher_response['data']] + + created_vouchers = [] + if sender_address and recipient_address and amount and token: 
Transaction.objects.create( recipient_address=recipient_address, @@ -392,27 +416,41 @@ def add_voucher(self, request): category=category, token=token ) - Voucher.objects.create( - voucher_code=voucher, - realm=radius_desk_realm_db, - cloud=radius_desk_cloud_db, - radius_desk_instance=radius_desk_instance_db, - wallet_address=sender_address, + + # Create a voucher record for each voucher code + for voucher_code in voucher_codes: + voucher_obj = Voucher.objects.create( + voucher_code=voucher_code, + realm=radius_desk_realm_db, + cloud=radius_desk_cloud_db, + radius_desk_instance=radius_desk_instance_db, + wallet_address=sender_address, + ) + created_vouchers.append(voucher_obj) + else: + # Create a voucher record for each voucher code + for voucher_code in voucher_codes: + voucher_obj = Voucher.objects.create( + voucher_code=voucher_code, + realm=radius_desk_realm_db, + cloud=radius_desk_cloud_db, + radius_desk_instance=radius_desk_instance_db, + user=user_db + ) + created_vouchers.append(voucher_obj) + + # Return appropriate response based on quantity + if quantity == 1: + return Response( + {'voucher': voucher_codes[0]}, + status=status.HTTP_201_CREATED ) else: - Voucher.objects.create( - voucher_code=voucher, - realm=radius_desk_realm_db, - cloud=radius_desk_cloud_db, - radius_desk_instance=radius_desk_instance_db, - user=user_db + return Response( + {'vouchers': voucher_codes, 'count': len(voucher_codes)}, + status=status.HTTP_201_CREATED ) - return Response( - {'voucher': voucher}, - status=status.HTTP_201_CREATED - ) - except Exception as e: return Response( {"error": str(e)}, @@ -431,11 +469,15 @@ def wallet_address_vouchers(self, request): status=status.HTTP_400_BAD_REQUEST ) try: - vouchers = (Voucher.objects.filter( - wallet_address=wallet_address) - .order_by('-created_at')) - serializer = self.get_serializer(vouchers, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) + vouchers = Voucher.objects.filter( + 
wallet_address=wallet_address + ).order_by('-created_at') + + # Use pagination + paginator = VoucherPagination() + page = paginator.paginate_queryset(vouchers, request) + serializer = self.get_serializer(page, many=True) + return paginator.get_paginated_response(serializer.data) except Exception as e: return Response({ "error": f"unexpected error {e}"}, @@ -535,6 +577,155 @@ def get_all_vouchers_stats(self, request): status=status.HTTP_400_BAD_REQUEST ) + @action( + detail=False, + methods=['get'], + permission_classes=[IsSuperUserOrAPIKeyUserOrNetworkAdmin] + ) + def get_all_vouchers_stats_db(self, request): + """ + Get all vouchers statistics from database with pagination. + This is the main method for getting voucher stats now. + """ + try: + radius_desk_instance_pk = request.query_params.get( + 'radius_desk_instance_pk' + ) + radius_desk_cloud_pk = request.query_params.get( + 'radius_desk_cloud_pk' + ) + + if not radius_desk_instance_pk or not radius_desk_cloud_pk: + return Response( + { + "error": + "Missing required parameters: " + "radius_desk_instance_pk and radius_desk_cloud_pk" + }, + status=status.HTTP_400_BAD_REQUEST + ) + + radius_desk_instance_db = RadiusDeskInstance.objects.get( + pk=radius_desk_instance_pk + ) + # If network admin (and not a superuser), ensure they're associated + if (request.user.has_perm('core.network_admin') + and not request.user.is_superuser): + if not radius_desk_instance_db.administrators.filter( + pk=request.user.pk + ).exists(): + return Response( + {"error": "Unauthorized for this RadiusDeskInstance."}, + status=status.HTTP_403_FORBIDDEN + ) + + # Get vouchers from database with pagination + vouchers = Voucher.objects.filter( + radius_desk_instance=radius_desk_instance_pk, + cloud=radius_desk_cloud_pk + ).order_by('-created_at') + + # Use pagination + paginator = VoucherPagination() + page = paginator.paginate_queryset(vouchers, request) + serializer = self.get_serializer(page, many=True) + logger.info(f"Vouchers: 
{serializer.data}") + return paginator.get_paginated_response(serializer.data) + + except Exception as e: + return Response( + {"error": str(e)}, + status=status.HTTP_400_BAD_REQUEST + ) + + @action( + detail=False, + methods=['get'], + permission_classes=[IsSuperUserOrAPIKeyUserOrNetworkAdmin] + ) + def get_voucher_stats_detailed(self, request): + """ + Get detailed statistics for a specific voucher using the radaccts endpoint. + This provides comprehensive usage data including data transfer, session times, etc. + """ + voucher_code = request.query_params.get("voucher_code") + radius_desk_instance_pk = request.query_params.get("radius_desk_instance_pk") + radius_desk_cloud_pk = request.query_params.get("radius_desk_cloud_pk") + + if not voucher_code or not radius_desk_instance_pk or not radius_desk_cloud_pk: + return Response( + {"error": "voucher_code, radius_desk_instance_pk, and radius_desk_cloud_pk are required."}, + status=status.HTTP_400_BAD_REQUEST + ) + + try: + # Get voucher from database + voucher = Voucher.objects.filter( + voucher_code=voucher_code, + radius_desk_instance=radius_desk_instance_pk, + cloud=radius_desk_cloud_pk + ).first() + + if not voucher: + return Response( + {"error": "Voucher not found in database."}, + status=status.HTTP_404_NOT_FOUND + ) + + # Get RADIUSdesk instance and cloud + instance = RadiusDeskInstance.objects.get(pk=radius_desk_instance_pk) + cloud_obj = Cloud.objects.get(pk=radius_desk_cloud_pk) + + # Validate or refresh the token if necessary + token_valid = check_token(instance.token, instance.base_url) + if not token_valid: + instance.token = login( + username=instance.username, + password=instance.password, + base_url=instance.base_url + ) + instance.save() + + # Fetch detailed stats from RADIUSdesk API + voucher_stats_response = fetch_voucher_stats( + token=instance.token, + voucher_code=voucher_code, + cloud_id=cloud_obj.radius_desk_id, + base_url=instance.base_url + ) + + # Get profile information + profile = 
voucher.realm.profiles.filter( + radius_desk_instance=instance + ).first() + + # Calculate usage percentage if profile has data limits + usage_percentage = None + if profile and profile.data_limit_enabled and profile.data_limit_gb > 0: + total_data_bytes = voucher_stats_response.get('metaData', {}).get('totalInOut') + if total_data_bytes and total_data_bytes != 'null': + total_data_gb = float(total_data_bytes) / (1024 * 1024 * 1024) + usage_percentage = (total_data_gb / profile.data_limit_gb) * 100 + + # Prepare response data + response_data = { + "voucher_code": voucher_code, + "profile_name": profile.name if profile else None, + "data_limit_gb": profile.data_limit_gb if profile else None, + "data_limit_enabled": profile.data_limit_enabled if profile else False, + "usage_percentage": round(usage_percentage, 2) if usage_percentage is not None else None, + "total_sessions": voucher_stats_response.get('metaData', {}).get('totalCount', 0), + "total_data_in": voucher_stats_response.get('metaData', {}).get('totalIn'), + "total_data_out": voucher_stats_response.get('metaData', {}).get('totalOut'), + "total_data_inout": voucher_stats_response.get('metaData', {}).get('totalInOut'), + "sessions": voucher_stats_response.get('items', []) + } + + return Response(response_data, status=status.HTTP_200_OK) + + except Exception as e: + return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST) + class NetworkAdminVoucherViewSet(viewsets.ReadOnlyModelViewSet): """ @@ -545,8 +736,9 @@ class NetworkAdminVoucherViewSet(viewsets.ReadOnlyModelViewSet): authentication_classes = [KeycloakOrAPIKeyAuthentication] permission_classes = [IsAuthenticated] queryset = Voucher.objects.all() + pagination_class = VoucherPagination def get_queryset(self): user = self.request.user # Return vouchers only for instances where the user is an admin. 
- return Voucher.objects.filter(radius_desk_instance__administrators=user) + return Voucher.objects.filter(radius_desk_instance__administrators=user).order_by('-created_at') diff --git a/inethi/reward/migrations/0001_initial.py b/inethi/reward/migrations/0001_initial.py index c406e76..46ad753 100644 --- a/inethi/reward/migrations/0001_initial.py +++ b/inethi/reward/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 5.1 on 2025-02-18 12:30 +# Generated by Django 5.1 on 2025-08-08 13:59 import django.db.models.deletion from django.conf import settings @@ -10,8 +10,8 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ('core', '0012_transaction_sender_address_transaction_token_and_more'), - ('network', '0005_host_device_type'), + ('core', '0001_initial'), + ('network', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] @@ -23,11 +23,13 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='Reward Name', max_length=255)), ('reward_type', models.CharField(choices=[('uptime', 'Uptime Based'), ('custom', 'Custom')], default='uptime', max_length=20)), ('reward_amount', models.DecimalField(decimal_places=10, max_digits=20)), + ('interval_minutes', models.IntegerField(blank=True, help_text='Interval for recurring rewards in minutes', null=True)), ('is_cancelled', models.BooleanField(default=False)), ('created_at', models.DateTimeField(auto_now_add=True)), ('once_off', models.BooleanField(default=True)), ('celery_task_id', models.CharField(blank=True, max_length=255, null=True)), ('device', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='rewards', to='network.host')), + ('network', models.ForeignKey(blank=True, help_text='Network associated with this reward', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rewards', to='network.network')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
related_name='rewards', to=settings.AUTH_USER_MODEL)), ], ), @@ -38,7 +40,7 @@ class Migration(migrations.Migration): ('uptime_seconds', models.IntegerField(help_text='Total uptime in seconds')), ('percentage_awarded', models.DecimalField(decimal_places=2, help_text='Percentage of total reward', max_digits=5)), ('created_at', models.DateTimeField(auto_now_add=True)), - ('reward', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='reward_transaction', to='reward.reward')), + ('reward', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reward_transactions', to='reward.reward')), ('transaction', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='reward_transaction', to='core.transaction')), ], ), diff --git a/inethi/reward/tasks.py b/inethi/reward/tasks.py index a5f5954..c8689bc 100644 --- a/inethi/reward/tasks.py +++ b/inethi/reward/tasks.py @@ -93,10 +93,10 @@ def process_reward(reward_id): # Send tokens try: tx_receipt = crypto_utils.send_to_wallet_address( - from_address=admin_wallet.address, - private_key=admin_private_key, - to_address=device_wallet.address, - amount=awarded_amount + admin_wallet.address, + admin_private_key, + device_wallet.address, + awarded_amount ) tx_hash = None if ( diff --git a/inethi/sample_radiusdesk_config.json b/inethi/sample_radiusdesk_config.json new file mode 100644 index 0000000..d6074e0 --- /dev/null +++ b/inethi/sample_radiusdesk_config.json @@ -0,0 +1,148 @@ +[ + { + "name": "Main RADIUSdesk Instance", + "base_url": "https://radiusdesk.example.com", + "username": "admin", + "password": "admin_password", + "accepts_crypto": true, + "administrators": ["devuser", "network_admin"], + "clouds": [ + { + "name": "Main Cloud", + "radius_desk_id": 1, + "realms": [ + { + "name": "Default Realm", + "radius_desk_id": 1, + "profiles": [ + { + "name": "Basic Plan", + "radius_desk_id": 1, + "data_limit_enabled": true, + "data_limit_gb": 5.0, + 
"data_limit_reset": "monthly", + "speed_limit_enabled": true, + "speed_limit_mbs": 10.0, + "limit_session_enabled": false, + "session_limit": 0, + "cost": 0.0 + }, + { + "name": "Premium Plan", + "radius_desk_id": 2, + "data_limit_enabled": true, + "data_limit_gb": 20.0, + "data_limit_reset": "monthly", + "speed_limit_enabled": true, + "speed_limit_mbs": 50.0, + "limit_session_enabled": false, + "session_limit": 0, + "cost": 10.0 + }, + { + "name": "Unlimited Plan", + "radius_desk_id": 3, + "data_limit_enabled": false, + "data_limit_gb": 0, + "data_limit_reset": "never", + "speed_limit_enabled": true, + "speed_limit_mbs": 100.0, + "limit_session_enabled": true, + "session_limit": 1, + "cost": 25.0 + } + ] + }, + { + "name": "Guest Realm", + "radius_desk_id": 2, + "profiles": [ + { + "name": "Guest Access", + "radius_desk_id": 4, + "data_limit_enabled": true, + "data_limit_gb": 1.0, + "data_limit_reset": "daily", + "speed_limit_enabled": true, + "speed_limit_mbs": 5.0, + "limit_session_enabled": true, + "session_limit": 2, + "cost": 0.0 + } + ] + } + ] + }, + { + "name": "Secondary Cloud", + "radius_desk_id": 2, + "realms": [ + { + "name": "Business Realm", + "radius_desk_id": 3, + "profiles": [ + { + "name": "Business Basic", + "radius_desk_id": 5, + "data_limit_enabled": true, + "data_limit_gb": 50.0, + "data_limit_reset": "monthly", + "speed_limit_enabled": true, + "speed_limit_mbs": 25.0, + "limit_session_enabled": false, + "session_limit": 0, + "cost": 15.0 + }, + { + "name": "Business Pro", + "radius_desk_id": 6, + "data_limit_enabled": true, + "data_limit_gb": 100.0, + "data_limit_reset": "monthly", + "speed_limit_enabled": true, + "speed_limit_mbs": 100.0, + "limit_session_enabled": true, + "session_limit": 5, + "cost": 35.0 + } + ] + } + ] + } + ] + }, + { + "name": "Test RADIUSdesk Instance", + "base_url": "https://test-radiusdesk.example.com", + "username": "test_admin", + "password": "test_password", + "accepts_crypto": false, + "administrators": 
["devuser"], + "clouds": [ + { + "name": "Test Cloud", + "radius_desk_id": 1, + "realms": [ + { + "name": "Test Realm", + "radius_desk_id": 1, + "profiles": [ + { + "name": "Test Profile", + "radius_desk_id": 1, + "data_limit_enabled": false, + "data_limit_gb": 0, + "data_limit_reset": "never", + "speed_limit_enabled": false, + "speed_limit_mbs": 0, + "limit_session_enabled": false, + "session_limit": 0, + "cost": 0.0 + } + ] + } + ] + } + ] + } +] diff --git a/inethi/smart_contracts.json b/inethi/smart_contracts.json new file mode 100644 index 0000000..0d97630 --- /dev/null +++ b/inethi/smart_contracts.json @@ -0,0 +1,20 @@ +[ + { + "name": "iNethi Registry", + "address": "0x9b1a3F8FadD0dc86FBae5Cf66Fa682fDcd84a9b0", + "contract_type": "account index", + "description": "iNethi account registry for celo eth faucet", + "write_access": false, + "read_access": false, + "user_name": "inethiofficial" + }, + { + "name": "iNethi Faucet", + "address": "0xB821E49ADB53F0AbeD834278d5dFc57901c30Eea", + "contract_type": "eth faucet", + "description": "iNethi faucet for celo", + "write_access": false, + "read_access": false, + "user_name": "inethiofficial" + } +] diff --git a/inethi/smart_contracts/views.py b/inethi/smart_contracts/views.py index 48532bf..8a1ec4f 100644 --- a/inethi/smart_contracts/views.py +++ b/inethi/smart_contracts/views.py @@ -27,12 +27,7 @@ class SmartContractViewSet(viewsets.ModelViewSet): authentication_classes = (KeycloakAuthentication,) permission_classes = (IsAuthenticated,) queryset = SmartContract.objects.all() - c_utils = CryptoUtils( - contract_abi_path=settings.ABI_FILE_PATH, - contract_address=settings.CONTRACT_ADDRESS, - registry=settings.FAUCET_AND_INDEX_ENABLED, - faucet=settings.FAUCET_AND_INDEX_ENABLED, - ) + # CryptoUtils will be created per transaction to avoid nonce conflicts def get_permissions(self): if self.action in ['create', 'update', 'partial_update', 'destroy']: @@ -197,10 +192,14 @@ def registry_add(self, request, pk=None): 
address=settings.ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS ) p_key = decrypt_private_key(account_index_creator.private_key) - receipt = self.c_utils.registry_add( - private_key=p_key, - address_to_add=add_address, - ) + # Create new CryptoUtils instance for registry operations + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + receipt = crypto_utils.registry_add(p_key, add_address) if isinstance(receipt, dict): receipt_data = receipt else: @@ -295,10 +294,14 @@ def faucet_give_to(self, request, pk=None): ) p_key = decrypt_private_key(faucet_creator.private_key) - receipt = self.c_utils.faucet_give_to( - private_key=p_key, - give_to_address=give_to_addr, + # Create new CryptoUtils instance for faucet operations + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, ) + receipt = crypto_utils.faucet_give_to(p_key, give_to_addr) if isinstance(receipt, dict): receipt_data = receipt @@ -363,7 +366,14 @@ def registry_check_active(self, request, pk=None): ) wallet_addr = request.data['address'] - active = self.c_utils.account_index_check_active(wallet_addr) + # Create new CryptoUtils instance for balance check + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + active = crypto_utils.account_index_check_active(wallet_addr) return Response( status=status.HTTP_200_OK, @@ -412,36 +422,27 @@ def faucet_gimme(self, request, pk=None): wallet.private_key ) - gimme_rsp = self.c_utils.faucet_gimme( - decrypted_private_key, - address + # Create new CryptoUtils instance for faucet operations + crypto_utils = 
CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + + # Call faucet gimme (new method returns transaction receipt) + receipt = crypto_utils.faucet_gimme(decrypted_private_key, wallet.address) + + # For now, return success since the new method doesn't return detailed info + # You may want to add additional checks here if needed + return Response( + { + 'success': True, + 'transaction_hash': receipt.transactionHash.hex(), + 'gas_used': receipt.gasUsed + }, + status=status.HTTP_200_OK ) - print(f'Gimme rsp: {gimme_rsp}') - if gimme_rsp['success']: - return Response( - { - 'amount': gimme_rsp['amount'] - }, - status=status.HTTP_200_OK - ) - # order matters - elif gimme_rsp['time_check']: - return Response( - { - 'error': - f'you have to wait until {gimme_rsp["time"]}' - }, - status=status.HTTP_400_BAD_REQUEST - ) - elif gimme_rsp['faucet_thresh']: - return Response( - { - 'error': - f'Your balance needs to be below ' - f'{gimme_rsp["threshold"]}' - }, - status=status.HTTP_400_BAD_REQUEST - ) except Exception as e: return Response( @@ -495,7 +496,14 @@ def faucet_next_time(self, request, pk=None): wallet = Wallet.objects.get(user=request.user) address = wallet.address - get_next_time = self.c_utils.faucet_check_time(address) + # Create new CryptoUtils instance for faucet operations + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + get_next_time = crypto_utils.faucet_check_time(address) return Response( { 'can_request': get_next_time['is_older'], @@ -546,7 +554,14 @@ def faucet_balance(self, request, pk=None): wallet = Wallet.objects.get(user=request.user) address = wallet.address - balance_thresh = self.c_utils.faucet_balance_threshold(address) + # Create new 
CryptoUtils instance for faucet operations + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + balance_thresh = crypto_utils.faucet_balance_threshold(address) return Response( { 'amount': balance_thresh, diff --git a/inethi/transaction/views.py b/inethi/transaction/views.py index 0b04067..b7379a6 100644 --- a/inethi/transaction/views.py +++ b/inethi/transaction/views.py @@ -1,5 +1,6 @@ from rest_framework.permissions import IsAuthenticated from rest_framework.viewsets import ReadOnlyModelViewSet +from rest_framework.pagination import PageNumberPagination from core.models import Transaction from .serializers import TransactionSerializer from rest_framework.decorators import action @@ -8,11 +9,19 @@ from utils.keycloak import KeycloakAuthentication +class TransactionPagination(PageNumberPagination): + """Custom pagination for transaction endpoints.""" + page_size = 20 + page_size_query_param = 'page_size' + max_page_size = 100 + + class TransactionViewSet(ReadOnlyModelViewSet): queryset = Transaction.objects.all() serializer_class = TransactionSerializer authentication_classes = [KeycloakAuthentication] permission_classes = [IsAuthenticated] + pagination_class = TransactionPagination def get_queryset(self): if self.request.user.is_superuser: @@ -25,6 +34,10 @@ def get_queryset(self): @action(detail=False, methods=['get'], url_path='by-user') def list_by_user(self, request): - queryset = self.get_queryset() + queryset = self.get_queryset().order_by('-timestamp') + page = self.paginate_queryset(queryset) + if page is not None: + serializer = self.get_serializer(page, many=True) + return self.get_paginated_response(serializer.data) serializer = self.get_serializer(queryset, many=True) return Response(serializer.data) diff --git a/inethi/users.json b/inethi/users.json new file mode 100644 index 
0000000..acbe81a --- /dev/null +++ b/inethi/users.json @@ -0,0 +1,12 @@ +[ + { + "username": "network_admin_1", + "password": "password1", + "email": "network_admin_1@inethi.com" + }, + { + "username": "network_admin_2", + "password": "password2", + "email": "network_admin_2@inethi.com" + } +] diff --git a/inethi/utils/crypto.py b/inethi/utils/crypto.py index 08d34fc..6647cff 100644 --- a/inethi/utils/crypto.py +++ b/inethi/utils/crypto.py @@ -1,36 +1,39 @@ +""" +CryptoUtils with manual transaction building for Web3.py v7 compatibility +Works with Celo via https://forno.celo.org +""" + import json import logging -from cryptography.fernet import Fernet +from cryptography.fernet import Fernet from web3 import Web3 from django.conf import settings -from web3.types import TxReceipt - from datetime import datetime, timezone from django.utils import timezone as django_timezone +from web3.types import TxReceipt logger = logging.getLogger(__name__) +# ---------------- Utility Functions ---------------- # + def convert_wei_to_celo(wei_amount): """Convert wei amount to celo""" - celo_amount = wei_amount / 1e18 - return celo_amount + return wei_amount / 1e18 def encrypt_private_key(private_key: str) -> str: """Fernet encrypt a private key.""" fernet = Fernet(settings.WALLET_ENCRYPTION_KEY) - encrypted_key = fernet.encrypt(private_key.encode()) - return encrypted_key.decode() + return fernet.encrypt(private_key.encode()).decode() def decrypt_private_key(encrypted_key: str) -> str: """Fernet decrypt a private key.""" fernet = Fernet(settings.WALLET_ENCRYPTION_KEY) - decrypted_key = fernet.decrypt(encrypted_key.encode()) - return decrypted_key.decode() + return fernet.decrypt(encrypted_key.encode()).decode() def load_contract(abi_path: str, contract_address: str): @@ -46,17 +49,24 @@ def load_contract(abi_path: str, contract_address: str): return contract +# ---------------- Main Class ---------------- # + class CryptoUtils: - """Utility class for performing blockchain 
interactions""" + """ + Utility class for blockchain interactions with manual transaction building. + Compatible with Web3.py v7. + """ + def __init__( - self, - contract_abi_path: str, - contract_address: str, - registry: bool = False, - faucet: bool = False + self, + contract_abi_path: str, + contract_address: str, + registry: bool = False, + faucet: bool = False ): self.w3 = Web3(Web3.HTTPProvider(settings.BLOCKCHAIN_PROVIDER_URL)) self.contract = load_contract(contract_abi_path, contract_address) + if registry: self.registry = load_contract( settings.REGISTRY_ABI_FILE_PATH, @@ -64,11 +74,14 @@ def __init__( ) else: self.registry = None + if faucet: self.faucet = load_contract( settings.FAUCET_ABI_FILE_PATH, settings.FAUCET_ADDRESS ) + else: + self.faucet = None def create_wallet(self) -> dict: """Create a wallet on the blockchain.""" @@ -86,9 +99,9 @@ def create_wallet(self) -> dict: return {} def complete_transaction( - self, - private_key: str, - transaction: dict + self, + private_key: str, + transaction: dict ) -> TxReceipt: """ Sign, hash and get transaction receipt. 
@@ -108,11 +121,11 @@ def complete_transaction( return receipt def estimate_gas_for_transfer( - self, - contract, - from_address: str, - to_address: str, - token_amount: int + self, + contract, + from_address: str, + to_address: str, + token_amount: int ) -> int: """Estimate the gas required for a transfer.""" transfer_function = contract.functions.transfer( @@ -123,188 +136,102 @@ def estimate_gas_for_transfer( {'from': Web3.to_checksum_address(from_address)} ) + # ---------------- Token Transfers ---------------- # + def send_to_wallet_address( - self, - from_address: str, - private_key: str, - to_address: str, - amount: float + self, + from_address: str, + private_key: str, + to_address: str, + amount: float, + nonce: int = None, + max_retries: int = 3 ) -> TxReceipt: - """Send tokens to a wallet address.""" + """Send tokens to a wallet address with nonce management and retry.""" # Calculate token amount adjusted for decimals decimals = self.contract.functions.decimals().call() token_amount = int(amount * (10**decimals)) - # Estimate gas and get current gas price - gas = self.estimate_gas_for_transfer( - self.contract, - from_address, - to_address, - token_amount - ) - print('gas', gas) - - gas_price = self.w3.eth.gas_price - - logger.info( - f"transfering to {to_address} from {from_address} for {token_amount} " - f"with gas {gas} and gas_price {gas_price}" - ) - # Prepare and sign the transaction - nonce = self.w3.eth.get_transaction_count(Web3.to_checksum_address(from_address)) - transfer = self.contract.functions.transfer( - Web3.to_checksum_address(to_address), token_amount - ) - tx = transfer.build_transaction({ - 'chainId': self.w3.eth.chain_id, - 'gas': gas, - 'gasPrice': gas_price, - 'nonce': nonce, - }) - - receipt = self.complete_transaction(private_key, tx) - return receipt - - def check_gas_status(self, from_address: str, gas_amount: int) -> bool: - gas_balance = self.balance_of_celo(from_address) - if gas_balance > gas_amount: - return True - return 
False - - def balance_of(self, address: str) -> float: - """Check the balance of a wallet.""" - raw_balance = self.contract.functions.balanceOf(address).call() - decimals = self.contract.functions.decimals().call() - return raw_balance / (10**decimals) - - def balance_of_celo(self, address: str) -> float: - """ - Check the CELO balance of a wallet. - This checks the native CELO balance of the address. - """ - try: - # Get the raw balance in Wei - raw_balance = self.w3.eth.get_balance(Web3.to_checksum_address(address)) - # Convert the balance from Wei to CELO - celo_balance = raw_balance - # celo_balance = convert_wei_to_celo(raw_balance) - return celo_balance - except Exception as e: - print(f"Error fetching CELO balance for address {address}: {e}") - return 0.0 - - def faucet_give_to( - self, - private_key: str, - give_to_address: str, - nonce: int = None, - max_retries: int = 3 - ) -> TxReceipt: - """ - Give tokens to an address registered in - the account index, with nonce management and retry. 
- """ - account = self.w3.eth.account.from_key(private_key) - sender_address = account.address + attempt = 0 - logger.info(f"faucet_give_to: {give_to_address} with nonce {nonce}") + logger.info(f"send_to_wallet_address: {to_address} from {from_address} for {amount}") + while attempt < max_retries: try: + # Estimate gas and get current gas price + gas = self.estimate_gas_for_transfer( + self.contract, + from_address, + to_address, + token_amount + ) + gas_price = self.w3.eth.gas_price + + logger.info( + f"transfering to {to_address} from {from_address} for {token_amount} " + f"with gas {gas} and gas_price {gas_price}" + ) + + # Get nonce if not provided if nonce is None: - nonce_to_use = self.w3.eth.get_transaction_count( - Web3.to_checksum_address(sender_address) - ) + nonce_to_use = self.w3.eth.get_transaction_count(Web3.to_checksum_address(from_address)) else: nonce_to_use = nonce - gas_price = self.w3.eth.gas_price - gas_estimate = self.faucet.functions.giveTo( - Web3.to_checksum_address(give_to_address) - ).estimate_gas({ - 'from': Web3.to_checksum_address(sender_address) - }) - tx = self.faucet.functions.giveTo( - Web3.to_checksum_address(give_to_address) - ).build_transaction( - { - 'from': Web3.to_checksum_address(sender_address), - 'nonce': nonce_to_use, - 'gas': gas_estimate, - 'gasPrice': gas_price, - 'chainId': self.w3.eth.chain_id, - } + + # Prepare and sign the transaction + transfer = self.contract.functions.transfer( + Web3.to_checksum_address(to_address), token_amount ) + tx = transfer.build_transaction({ + 'chainId': self.w3.eth.chain_id, + 'gas': gas, + 'gasPrice': gas_price, + 'nonce': nonce_to_use, + }) + receipt = self.complete_transaction(private_key, tx) if receipt: return receipt else: logger.error( - f"faucet_give_to: {give_to_address}, nonce {nonce} failed." + f"send_to_wallet_address: {to_address}, nonce {nonce} failed." 
) raise Exception("Transaction failed") + except Exception as e: - logger.error(f"faucet_give_to attempt {attempt+1} failed: {e}") + logger.error(f"send_to_wallet_address attempt {attempt+1} failed: {e}") # Check for nonce error if hasattr(e, 'args') and e.args and 'nonce too low' in str(e.args[0]): logger.warning( "Nonce too low error detected, refetching nonce and retrying..." ) - nonce = self.w3.eth.get_transaction_count( - Web3.to_checksum_address(sender_address) - ) + nonce = self.w3.eth.get_transaction_count(Web3.to_checksum_address(from_address)) attempt += 1 continue raise - raise Exception("faucet_give_to failed after retries") - - def account_index_check_active(self, address_to_check: str) -> bool: - """Check if an address is active on the account index.""" - if self.registry is None: - raise Exception("Registry contract not loaded.") - active = self.registry.functions.isActive( - Web3.to_checksum_address(address_to_check) - ).call() - return active - - def pre_transaction_check( - self, - private_key_admin: str, - from_address: str, - to_address: str, - amount: float) -> bool: - """Check if transaction will be successful. 
Rectify if not""" - decimals = self.contract.functions.decimals().call() - token_amount = int(amount * (10 ** decimals)) + + raise Exception("send_to_wallet_address failed after retries") - # Estimate gas and get current gas price - gas = self.estimate_gas_for_transfer( - self.contract, - from_address, - to_address, - token_amount - ) + # ---------------- Faucet ---------------- # - # ensure wallet has enough gas - gas_status = self.check_gas_status(from_address, gas) - # there is not enough gas to transact - if not gas_status: - active = self.account_index_check_active(from_address) - if not active: - self.registry_add(private_key_admin, from_address) - self.faucet_give_to(private_key_admin, from_address) - # if no error is raised return true - return True - - def registry_add( - self, private_key, address_to_add, nonce=None, max_retries=3 - ): + def faucet_give_to( + self, + private_key: str, + give_to_address: str, + nonce: int = None, + max_retries: int = 3 + ) -> TxReceipt: """ - Add an address to a registry using the private key - of the contract owner, with nonce management and retry. + Give tokens to an address registered in + the account index, with nonce management and retry. 
""" - if self.registry is None: - raise Exception("Registry contract not loaded.") + if not self.faucet: + raise Exception("Faucet contract not loaded") + account = self.w3.eth.account.from_key(private_key) sender_address = account.address attempt = 0 + logger.info(f"faucet_give_to: {give_to_address} with nonce {nonce}") + while attempt < max_retries: try: if nonce is None: @@ -313,17 +240,16 @@ def registry_add( ) else: nonce_to_use = nonce + gas_price = self.w3.eth.gas_price - gas_estimate = self.registry.functions.add( - Web3.to_checksum_address(address_to_add) + gas_estimate = self.faucet.functions.giveTo( + Web3.to_checksum_address(give_to_address) ).estimate_gas({ 'from': Web3.to_checksum_address(sender_address) }) - logger.info( - f"registry_add: {address_to_add}, gas {gas_estimate}" - ) - tx = self.registry.functions.add( - Web3.to_checksum_address(address_to_add) + + tx = self.faucet.functions.giveTo( + Web3.to_checksum_address(give_to_address) ).build_transaction( { 'from': Web3.to_checksum_address(sender_address), @@ -333,13 +259,18 @@ def registry_add( 'chainId': self.w3.eth.chain_id, } ) + receipt = self.complete_transaction(private_key, tx) if receipt: return receipt else: + logger.error( + f"faucet_give_to: {give_to_address}, nonce {nonce} failed." 
+ ) raise Exception("Transaction failed") + except Exception as e: - logger.error(f"registry_add attempt {attempt+1} failed: {e}") + logger.error(f"faucet_give_to attempt {attempt+1} failed: {e}") # Check for nonce error if hasattr(e, 'args') and e.args and 'nonce too low' in str(e.args[0]): logger.warning( @@ -351,37 +282,14 @@ def registry_add( attempt += 1 continue raise - raise Exception("registry_add failed after retries") - - def faucet_check_time(self, address_to_check: str) -> dict: - """Check if an address can receive funds at this time""" - next_time = self.faucet.functions.nextTime( - _subject=address_to_check - ).call({'from': address_to_check}) - - aware_utc_dt = datetime.fromtimestamp(next_time, tz=timezone.utc) - now = django_timezone.localtime(django_timezone.now()) - local_dt = django_timezone.localtime(aware_utc_dt) - is_older = local_dt <= now - return { - 'is_older': is_older, - 'time_stamp': str(local_dt), - } - - def faucet_balance_threshold(self, address: str) -> float: - """Check what the threshold amount is for a faucet""" - try: - balance_threshold = self.faucet.functions.nextBalance( - _subject=Web3.to_checksum_address(address) - ).call({'from': Web3.to_checksum_address(address)}) - celo_amount = convert_wei_to_celo(balance_threshold) - return celo_amount - except Exception as e: - print(f'Error calling nextBalance: {e}') - return 0.0 + + raise Exception("faucet_give_to failed after retries") def faucet_gimme(self, private_key: str, address: str) -> dict: """Call the gimme function for an account from the faucet""" + if not self.faucet: + raise Exception("Faucet contract not loaded") + raw_balance = self.w3.eth.get_balance(Web3.to_checksum_address(address)) balance = convert_wei_to_celo(raw_balance) @@ -418,6 +326,7 @@ def faucet_gimme(self, private_key: str, address: str) -> dict: gas_estimate = self.faucet.functions.gimme().estimate_gas({ 'from': Web3.to_checksum_address(address), }) + tx = 
self.faucet.functions.gimme().build_transaction({ 'from': Web3.to_checksum_address(address), 'nonce': nonce, @@ -450,6 +359,7 @@ def faucet_gimme(self, private_key: str, address: str) -> dict: } except Exception as e: print(f'Error processing events: {e}') + return { 'balance': balance, 'threshold': faucet_thresh, @@ -458,3 +368,182 @@ def faucet_gimme(self, private_key: str, address: str) -> dict: 'time_check': False, 'time': -1 } + + def faucet_check_time(self, address_to_check: str) -> dict: + """Check if an address can receive funds at this time""" + if not self.faucet: + raise Exception("Faucet contract not loaded") + + next_time = self.faucet.functions.nextTime( + _subject=address_to_check + ).call({'from': address_to_check}) + + aware_utc_dt = datetime.fromtimestamp(next_time, tz=timezone.utc) + now = django_timezone.localtime(django_timezone.now()) + local_dt = django_timezone.localtime(aware_utc_dt) + is_older = local_dt <= now + return { + 'is_older': is_older, + 'time_stamp': str(local_dt), + } + + def faucet_balance_threshold(self, address: str) -> float: + """Check what the threshold amount is for a faucet""" + if not self.faucet: + raise Exception("Faucet contract not loaded") + + try: + balance_threshold = self.faucet.functions.nextBalance( + _subject=Web3.to_checksum_address(address) + ).call({'from': Web3.to_checksum_address(address)}) + celo_amount = convert_wei_to_celo(balance_threshold) + return celo_amount + except Exception as e: + logger.error(f'Error calling nextBalance: {e}') + return 0.0 + + # ---------------- Registry ---------------- # + + def registry_add( + self, private_key, address_to_add, nonce=None, max_retries=3 + ): + """ + Add an address to a registry using the private key + of the contract owner, with nonce management and retry. 
+ """ + if self.registry is None: + raise Exception("Registry contract not loaded.") + + account = self.w3.eth.account.from_key(private_key) + sender_address = account.address + attempt = 0 + + while attempt < max_retries: + try: + if nonce is None: + nonce_to_use = self.w3.eth.get_transaction_count( + Web3.to_checksum_address(sender_address) + ) + else: + nonce_to_use = nonce + + gas_price = self.w3.eth.gas_price + gas_estimate = self.registry.functions.add( + Web3.to_checksum_address(address_to_add) + ).estimate_gas({ + 'from': Web3.to_checksum_address(sender_address) + }) + + logger.info( + f"registry_add: {address_to_add}, gas {gas_estimate}" + ) + + tx = self.registry.functions.add( + Web3.to_checksum_address(address_to_add) + ).build_transaction( + { + 'from': Web3.to_checksum_address(sender_address), + 'nonce': nonce_to_use, + 'gas': gas_estimate, + 'gasPrice': gas_price, + 'chainId': self.w3.eth.chain_id, + } + ) + + receipt = self.complete_transaction(private_key, tx) + if receipt: + return receipt + else: + raise Exception("Transaction failed") + + except Exception as e: + logger.error(f"registry_add attempt {attempt+1} failed: {e}") + # Check for nonce error + if hasattr(e, 'args') and e.args and 'nonce too low' in str(e.args[0]): + logger.warning( + "Nonce too low error detected, refetching nonce and retrying..." 
+ ) + nonce = self.w3.eth.get_transaction_count( + Web3.to_checksum_address(sender_address) + ) + attempt += 1 + continue + raise + + raise Exception("registry_add failed after retries") + + def account_index_check_active(self, address_to_check: str) -> bool: + """Check if an address is active on the account index.""" + if self.registry is None: + raise Exception("Registry contract not loaded.") + + active = self.registry.functions.isActive( + Web3.to_checksum_address(address_to_check) + ).call() + return active + + # ---------------- Balances ---------------- # + + def balance_of(self, address: str) -> float: + """Get token balance for an address.""" + try: + balance = self.contract.functions.balanceOf( + Web3.to_checksum_address(address) + ).call() + decimals = self.contract.functions.decimals().call() + return balance / (10 ** decimals) + except Exception as e: + logger.error(f"Error getting balance: {e}") + return 0.0 + + def balance_of_celo(self, address: str) -> float: + """Get CELO balance for an address.""" + try: + balance = self.w3.eth.get_balance(Web3.to_checksum_address(address)) + return self.w3.from_wei(balance, 'ether') + except Exception as e: + logger.error(f"Error getting CELO balance: {e}") + return 0.0 + + def check_gas_status(self, from_address: str, gas_amount: int) -> bool: + """Check if address has enough gas for transaction.""" + try: + balance = self.w3.eth.get_balance(Web3.to_checksum_address(from_address)) + gas_price = self.w3.eth.gas_price + required_gas = gas_amount * gas_price + return balance >= required_gas + except Exception as e: + logger.error(f"Error checking gas status: {e}") + return False + + # ---------------- Pre-Transaction Checks ---------------- # + + def pre_transaction_check( + self, + private_key_admin: str, + from_address: str, + to_address: str, + amount: float + ) -> bool: + """Check if transaction will be successful. 
Rectify if not.""" + decimals = self.contract.functions.decimals().call() + token_amount = int(amount * (10 ** decimals)) + + # Estimate gas and get current gas price + gas = self.estimate_gas_for_transfer( + self.contract, + from_address, + to_address, + token_amount + ) + + # ensure wallet has enough gas + gas_status = self.check_gas_status(from_address, gas) + # there is not enough gas to transact + if not gas_status: + active = self.account_index_check_active(from_address) + if not active: + self.registry_add(private_key_admin, from_address) + self.faucet_give_to(private_key_admin, from_address) + # if no error is raised return true + return True diff --git a/inethi/utils/radius_desk.py b/inethi/utils/radius_desk.py index d05a787..363e6a4 100644 --- a/inethi/utils/radius_desk.py +++ b/inethi/utils/radius_desk.py @@ -131,6 +131,41 @@ def fetch_voucher_details(token, voucher_code, cloud_id, base_url, limit=150): raise Exception(f"Failed to fetch voucher details: {response.text}") +def fetch_voucher_stats(token, voucher_code, cloud_id, base_url, limit=150): + """ + Fetch detailed statistics for a specific voucher from the RADIUSdesk API. + This uses the radaccts endpoint to get comprehensive usage data. 
+ """ + url = f"{base_url}/radaccts/index.json" + + # Generate a current timestamp string for the _dc parameter + timestamp = str(int(time.time() * 1000)) + + params = { + "_dc": timestamp, + "username": voucher_code, # voucher_code is passed as the username + "page": 1, + "start": 0, + "limit": limit, + "token": token, + "sel_language": "4_4", + "cloud_id": cloud_id, + } + cookies = {"Token": token} + + response = requests.get( + url, + headers=HEADERS_URL_ENCODED, + params=params, + cookies=cookies + ) + + if response.status_code == 200: + return response.json() + else: + raise Exception(f"Failed to fetch voucher stats: {response.text}") + + def create_voucher( token, base_url, diff --git a/inethi/wallet/serializers.py b/inethi/wallet/serializers.py index 4873efd..c8899e4 100644 --- a/inethi/wallet/serializers.py +++ b/inethi/wallet/serializers.py @@ -65,35 +65,30 @@ def create(self, validated_data): address=settings.ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS ) p_key_admin = decrypt_private_key(account_index_creator.private_key) - sender_address_admin = account_index_creator.address - # Fetch the starting nonce - nonce = crypto_utils.w3.eth.get_transaction_count(sender_address_admin) - # registry_add with nonce - crypto_utils.registry_add( - private_key=p_key_admin, - address_to_add=w_addr, - nonce=nonce + + # Create new CryptoUtils instance for registry operations + registry_crypto = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, ) - nonce += 1 # increment for next tx + registry_crypto.registry_add(p_key_admin, w_addr) + # send the account gas faucet_creator = Wallet.objects.get( # type: ignore[attr-defined] address=settings.FAUCET_ADMIN_WALLET_ADDRESS ) p_key_faucet = decrypt_private_key(faucet_creator.private_key) - sender_address_faucet = faucet_creator.address - if sender_address_faucet == sender_address_admin: - # use 
incremented nonce - faucet_nonce = nonce - else: - faucet_nonce = crypto_utils.w3.eth.get_transaction_count( - sender_address_faucet - ) - nonce = nonce + 1 - crypto_utils.faucet_give_to( - private_key=p_key_faucet, - give_to_address=w_addr, - nonce=faucet_nonce + + # Create new CryptoUtils instance for faucet operations + faucet_crypto = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, ) + faucet_crypto.faucet_give_to(p_key_faucet, w_addr) except Exception as e: logger.error( f"Error during wallet creation (registry/faucet): {e}" diff --git a/inethi/wallet/views.py b/inethi/wallet/views.py index 0463ad6..9f8dabc 100644 --- a/inethi/wallet/views.py +++ b/inethi/wallet/views.py @@ -23,12 +23,7 @@ class WalletViewSet(viewsets.ModelViewSet): serializer_class = WalletSerializer authentication_classes = (KeycloakAuthentication,) permission_classes = (permissions.IsAuthenticated,) - crypto_utils = CryptoUtils( - contract_abi_path=settings.ABI_FILE_PATH, - contract_address=settings.CONTRACT_ADDRESS, - registry=settings.FAUCET_AND_INDEX_ENABLED, - faucet=settings.FAUCET_AND_INDEX_ENABLED, - ) + # CryptoUtils will be created per transaction to avoid nonce conflicts def get_queryset(self): """ @@ -122,14 +117,28 @@ def send_token(self, request, pk=None): address=settings.ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS ) p_key = decrypt_private_key(faucet_creator.private_key) - self.crypto_utils.pre_transaction_check( + # Create new CryptoUtils instance for pre-transaction check + check_crypto = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + check_crypto.pre_transaction_check( private_key_admin=p_key, from_address=wallet.address, to_address=recipient_address, amount=float(amount) ) + # Create new 
CryptoUtils instance for this transaction + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) # Send tokens using CryptoUtils - tx_receipt = self.crypto_utils.send_to_wallet_address( + tx_receipt = crypto_utils.send_to_wallet_address( wallet.address, decrypted_private_key, recipient_address, @@ -220,14 +229,29 @@ def send_token_pk_username(self, request, pk=None): address=settings.ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS ) p_key = decrypt_private_key(faucet_creator.private_key) - self.crypto_utils.pre_transaction_check( + # Create new CryptoUtils instance for pre-transaction check + check_crypto = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + check_crypto.pre_transaction_check( private_key_admin=p_key, from_address=wallet.address, to_address=recipient_address, amount=float(amount) ) + # Create new CryptoUtils instance for this transaction + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + # Send tokens using CryptoUtils - tx_receipt = self.crypto_utils.send_to_wallet_address( + tx_receipt = crypto_utils.send_to_wallet_address( wallet.address, decrypted_private_key, recipient_address, @@ -295,8 +319,16 @@ def send_token_user_address(self, request): decrypted_private_key = utils.crypto.decrypt_private_key( wallet.private_key ) + # Create new CryptoUtils instance for this transaction + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + # Send 
tokens using CryptoUtils - tx_receipt = self.crypto_utils.send_to_wallet_address( + tx_receipt = crypto_utils.send_to_wallet_address( wallet.address, decrypted_private_key, recipient_address, @@ -369,8 +401,16 @@ def send_token_username(self, request): decrypted_private_key = utils.crypto.decrypt_private_key( wallet.private_key ) + # Create new CryptoUtils instance for this transaction + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + # Send tokens using CryptoUtils - tx_receipt = self.crypto_utils.send_to_wallet_address( + tx_receipt = crypto_utils.send_to_wallet_address( wallet.address, decrypted_private_key, recipient_address, @@ -403,7 +443,14 @@ def user_wallet_balance(self, request): if wallet: token_name = wallet.token_common_name if token_name == 'KRONE': - balance = self.crypto_utils.balance_of(wallet.address) + # Create new CryptoUtils instance for balance check + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + balance = crypto_utils.balance_of(wallet.address) return Response( {'balance': balance}, status=status.HTTP_200_OK @@ -531,7 +578,15 @@ def purchase_voucher(self, request): # Perform the crypto transaction for the voucher cost try: - tx_receipt = self.crypto_utils.send_to_wallet_address( + # Create new CryptoUtils instance for this transaction + crypto_utils = CryptoUtils( + contract_abi_path=settings.ABI_FILE_PATH, + contract_address=settings.CONTRACT_ADDRESS, + registry=settings.FAUCET_AND_INDEX_ENABLED, + faucet=settings.FAUCET_AND_INDEX_ENABLED, + ) + + tx_receipt = crypto_utils.send_to_wallet_address( wallet.address, decrypted_private_key, admin_wallet.address, diff --git a/requirements.txt b/requirements.txt index 
60cedc1..98e48e8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -35,6 +35,7 @@ django-celery-beat==2.7.0 django-cors-headers==4.6.0 django-environ==0.11.2 django-timezone-field==7.1 +django-timescaledb==0.2.10 djangorestframework==3.15.2 drf-spectacular==0.27.2 eth-account==0.13.4 diff --git a/setup.sh b/setup.sh new file mode 100755 index 0000000..dad1a2c --- /dev/null +++ b/setup.sh @@ -0,0 +1,420 @@ +#!/bin/bash + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +PURPLE='\033[0;35m' +CYAN='\033[0;36m' +NC='\033[0m' # No Color + +# Function to print colored output +print_color() { + printf "${1}${2}${NC}\n" +} + +# Function to print section headers +print_header() { + echo + print_color $CYAN "==================================" + print_color $CYAN "$1" + print_color $CYAN "==================================" + echo +} + +# Function to check if command exists +command_exists() { + command -v "$1" >/dev/null 2>&1 +} + +# Function to wait with countdown +wait_with_countdown() { + local seconds=$1 + local message=$2 + + print_color $YELLOW "$message" + for ((i=seconds; i>0; i--)); do + printf "\r${YELLOW}Waiting... ${i} seconds remaining${NC}" + sleep 1 + done + printf "\r${GREEN}Wait complete! ${NC}\n" +} + + + +print_header "iNethi Backend Production Setup" + +# Check if we're in the correct directory +if [ ! -f "docker-compose-prod.yml" ]; then + print_color $RED "Error: docker-compose-prod.yml not found!" + print_color $RED "Please run this script from the root of the iNethi backend repository." + exit 1 +fi + +# Check for required commands +print_color $BLUE "Checking prerequisites..." + +if ! command_exists docker; then + print_color $RED "Error: Docker is not installed or not in PATH" + print_color $YELLOW "Please install Docker first: https://docs.docker.com/engine/install/" + exit 1 +fi + +if ! command_exists docker-compose && ! 
docker compose version >/dev/null 2>&1; then + print_color $RED "Error: Docker Compose is not installed or not in PATH" + print_color $YELLOW "Please install Docker Compose: https://docs.docker.com/compose/install/" + exit 1 +fi + +print_color $GREEN "✓ Docker and Docker Compose are available" + +# Check Docker permissions +print_color $BLUE "Checking Docker permissions..." + +if ! docker ps >/dev/null 2>&1; then + print_color $RED "Error: Cannot run Docker commands without sudo!" + echo + print_color $YELLOW "This usually means your user is not in the 'docker' group." + print_color $YELLOW "To fix this issue, you have two options:" + echo + print_color $CYAN "Option 1 (Recommended): Add your user to the docker group" + print_color $YELLOW " 1. Run: sudo usermod -aG docker \$USER" + print_color $YELLOW " 2. Log out and log back in (or restart your session)" + print_color $YELLOW " 3. Verify with: docker ps" + echo + print_color $CYAN "Option 2: Run this script with sudo (not recommended for security reasons)" + print_color $YELLOW " sudo ./setup.sh" + echo + print_color $BLUE "For detailed instructions, see:" + print_color $BLUE "https://docs.docker.com/engine/install/linux-postinstall/" + echo + print_color $RED "Please fix the Docker permissions issue and run this script again." + exit 1 +fi + +print_color $GREEN "✓ Docker permissions are correct" + +# Test Docker Compose permissions +if ! (docker-compose version >/dev/null 2>&1 || docker compose version >/dev/null 2>&1); then + print_color $RED "Error: Cannot run Docker Compose commands!" + print_color $YELLOW "This might be related to Docker permissions. Please ensure Docker is properly configured." + print_color $BLUE "See: https://docs.docker.com/engine/install/linux-postinstall/" + exit 1 +fi + +print_color $GREEN "✓ Docker Compose permissions are correct" + +# Check if .env file exists +if [ ! -f ".env" ]; then + print_color $RED "Error: .env file not found!" 
+ print_color $RED "Please create a .env file based on .env.example before running this script." + print_color $YELLOW "Required variables include:" + print_color $YELLOW "- DB_HOST, DB_NAME, DB_USER, DB_PASS" + print_color $YELLOW "- TRAEFIK_BACKEND_HOST, TRAEFIK_ENTRYPOINTS, TRAEFIK_CERTRESOLVER, TRAEFIK_NETWORK_BRIDGE" + print_color $YELLOW "- KEYCLOAK configuration" + print_color $YELLOW "- Blockchain configuration" + exit 1 +fi + +print_color $GREEN "✓ .env file found" + +# Check JSON configuration files +print_header "Configuration Files Check" +print_color $BLUE "Checking JSON configuration files..." + +# Check if JSON files exist +json_files_missing=false + +if [ ! -f "inethi/sample_radiusdesk_config.json" ]; then + print_color $RED "Error: inethi/sample_radiusdesk_config.json not found!" + print_color $YELLOW "Please create this file before running the setup script." + json_files_missing=true +fi + +if [ ! -f "inethi/smart_contracts.json" ]; then + print_color $RED "Error: inethi/smart_contracts.json not found!" + print_color $YELLOW "Please create this file before running the setup script." + json_files_missing=true +fi + +if [ ! -f "inethi/users.json" ]; then + print_color $RED "Error: inethi/users.json not found!" + print_color $YELLOW "Please create this file before running the setup script." + json_files_missing=true +fi + +if [ "$json_files_missing" = true ]; then + print_color $RED "Please create the missing JSON files and run the script again." + exit 1 +fi + +# Validate JSON files if jq is available +if command_exists jq; then + print_color $BLUE "Validating JSON files..." + + if ! jq empty inethi/sample_radiusdesk_config.json 2>/dev/null; then + print_color $RED "Error: inethi/sample_radiusdesk_config.json contains invalid JSON" + exit 1 + fi + + if ! jq empty inethi/smart_contracts.json 2>/dev/null; then + print_color $RED "Error: inethi/smart_contracts.json contains invalid JSON" + exit 1 + fi + + if ! 
jq empty inethi/users.json 2>/dev/null; then + print_color $RED "Error: inethi/users.json contains invalid JSON" + exit 1 + fi + + print_color $GREEN "✓ All JSON files are valid" +else + print_color $YELLOW "Warning: jq not installed, skipping JSON validation" +fi + +print_color $GREEN "✓ All required JSON files found" + +# Display and confirm .env file +print_header "Environment Configuration Review" +print_color $CYAN "About to display your .env file contents..." +print_color $YELLOW "Press Enter to continue and view the .env file..." +read -r + +print_color $YELLOW "Current .env file contents:" +echo +cat .env +echo + +read -p "$(print_color $CYAN "Are you happy with the .env configuration? (y/N): ")" -r +if [[ ! $REPLY =~ ^[Yy]$ ]]; then + print_color $YELLOW "Please update your .env file and run the script again." + exit 0 +fi + +# Display and confirm JSON files +print_header "JSON Configuration Files Review" +print_color $CYAN "About to display your JSON configuration files..." +print_color $YELLOW "Press Enter to continue and view the JSON files..." +read -r + +print_color $YELLOW "Current sample_radiusdesk_config.json contents:" +echo +cat inethi/sample_radiusdesk_config.json +echo + +print_color $YELLOW "Current smart_contracts.json contents:" +echo +cat inethi/smart_contracts.json +echo + +print_color $YELLOW "Current users.json contents:" +echo +cat inethi/users.json +echo + +read -p "$(print_color $CYAN "Are you happy with the JSON configuration files? (y/N): ")" -r +if [[ ! $REPLY =~ ^[Yy]$ ]]; then + print_color $YELLOW "Please update your JSON configuration files and run the script again." + exit 0 +fi + +# Basic validation of .env file +print_color $BLUE "Validating .env file..." + +required_vars=("DB_HOST" "DB_NAME" "DB_USER" "DB_PASS" "TRAEFIK_BACKEND_HOST" "TRAEFIK_ENTRYPOINTS" "TRAEFIK_CERTRESOLVER" "TRAEFIK_NETWORK_BRIDGE") + +for var in "${required_vars[@]}"; do + if ! 
grep -q "^${var}=" .env; then + print_color $RED "Error: Required variable $var not found in .env file" + exit 1 + fi + + # Check if variable has a value + value=$(grep "^${var}=" .env | cut -d'=' -f2-) + if [ -z "$value" ] || [ "$value" = "your_value_here" ] || [ "$value" = "changeme" ]; then + print_color $RED "Error: Variable $var appears to have a placeholder value. Please set a real value." + exit 1 + fi +done + +print_color $GREEN "✓ .env file validation passed" + +# Stop any existing containers +print_header "Stopping Existing Containers" +if docker compose -f docker-compose-prod.yml down 2>/dev/null || docker compose -f docker-compose-prod.yml down 2>/dev/null; then + print_color $GREEN "✓ Stopped any existing containers" +else + print_color $YELLOW "No existing containers to stop" +fi + +# Build the Docker image +print_header "Building Docker Image" +print_color $BLUE "Building the iNethi backend image..." + +if docker compose -f docker-compose-prod.yml build 2>/dev/null || docker compose -f docker-compose-prod.yml build 2>/dev/null; then + print_color $GREEN "✓ Docker image built successfully" +else + print_color $RED "Error: Failed to build Docker image" + print_color $YELLOW "This might be a permissions issue. If you're getting permission denied errors," + print_color $YELLOW "please check: https://docs.docker.com/engine/install/linux-postinstall/" + exit 1 +fi + +# Start database and redis first +print_header "Starting Database and Redis" +print_color $BLUE "Starting TimescaleDB database and Redis..." 
+ +if docker compose -f docker-compose-prod.yml up -d db redis 2>/dev/null || docker-compose -f docker-compose-prod.yml up -d db redis 2>/dev/null; then + print_color $GREEN "✓ Database and Redis started" +else + print_color $RED "Error: Failed to start database and Redis" + print_color $YELLOW "Check Docker permissions if you're getting permission denied errors:" + print_color $YELLOW "https://docs.docker.com/engine/install/linux-postinstall/" + exit 1 +fi + +wait_with_countdown 15 "Waiting for database and Redis to be ready..." + +# Start the Django API +print_header "Starting Django API" +print_color $BLUE "Starting the iNethi backend API..." + +if docker compose -f docker-compose-prod.yml up -d app 2>/dev/null || docker-compose -f docker-compose-prod.yml up -d app 2>/dev/null; then + print_color $GREEN "✓ Django API started" +else + print_color $RED "Error: Failed to start Django API" + exit 1 +fi + +wait_with_countdown 45 "Waiting for Django API to complete migrations and be ready..." + +# Check if Django is healthy +print_color $BLUE "Checking Django API health..." +max_attempts=8 +attempt=1 + +while [ $attempt -le $max_attempts ]; do + if docker logs inethi-backend-v1 2>&1 | grep -q "Starting development server" || \ + docker logs inethi-backend-v1 2>&1 | grep -q "Booting worker" || \ + docker logs inethi-backend-v1 2>&1 | grep -q "Listening on"; then + print_color $GREEN "✓ Django API is healthy and ready" + break + fi + + if [ $attempt -eq $max_attempts ]; then + print_color $YELLOW "Warning: Could not confirm Django API health, but proceeding..." + print_color $BLUE "You can check the logs with: docker logs inethi-backend-v1" + break + fi + + print_color $YELLOW "Attempt $attempt/$max_attempts: Django API not ready yet, waiting..." + sleep 10 + ((attempt++)) +done + +# Start Celery worker +print_header "Starting Celery Worker" +print_color $BLUE "Starting Celery worker..." 
+ +if docker compose -f docker-compose-prod.yml up -d celery 2>/dev/null || docker-compose -f docker-compose-prod.yml up -d celery 2>/dev/null; then + print_color $GREEN "✓ Celery worker started" +else + print_color $RED "Error: Failed to start Celery worker" + exit 1 +fi + +wait_with_countdown 10 "Waiting for Celery worker to be ready..." + +# Start Celery beat +print_header "Starting Celery Beat" +print_color $BLUE "Starting Celery beat scheduler..." + +if docker compose -f docker-compose-prod.yml up -d celery_beat 2>/dev/null || docker-compose -f docker-compose-prod.yml up -d celery_beat 2>/dev/null; then + print_color $GREEN "✓ Celery beat started" +else + print_color $RED "Error: Failed to start Celery beat" + exit 1 +fi + +# Run management commands to populate the database +print_header "Populating Database" +print_color $BLUE "Running management commands to populate the database..." + +# Create users from JSON +print_color $BLUE "Creating users from users.json..." +if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_users_from_json inethi/users.json 2>/dev/null || \ + docker-compose -f docker-compose-prod.yml exec -T app python manage.py create_users_from_json inethi/users.json 2>/dev/null; then + print_color $GREEN "✓ Users created successfully" +else + print_color $YELLOW "Warning: Failed to create users (they might already exist)" +fi + +# Create smart contracts from JSON +print_color $BLUE "Creating smart contracts from smart_contracts.json..." 
+if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_smart_contracts_from_json inethi/smart_contracts.json 2>/dev/null || \ + docker-compose -f docker-compose-prod.yml exec -T app python manage.py create_smart_contracts_from_json inethi/smart_contracts.json 2>/dev/null; then + print_color $GREEN "✓ Smart contracts created successfully" +else + print_color $YELLOW "Warning: Failed to create smart contracts (they might already exist)" +fi + +# Create RADIUSdesk instances from JSON +print_color $BLUE "Creating RADIUSdesk instances from sample_radiusdesk_config.json..." +if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_radiusdesk_from_json inethi/sample_radiusdesk_config.json 2>/dev/null || \ + docker-compose -f docker-compose-prod.yml exec -T app python manage.py create_radiusdesk_from_json inethi/sample_radiusdesk_config.json 2>/dev/null; then + print_color $GREEN "✓ RADIUSdesk instances created successfully" +else + print_color $YELLOW "Warning: Failed to create RADIUSdesk instances (they might already exist)" +fi + +# Create superuser +print_color $BLUE "Creating superuser..." +if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser --noinput 2>/dev/null || \ + docker-compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser --noinput 2>/dev/null; then + print_color $GREEN "✓ Superuser created successfully" +else + print_color $YELLOW "Warning: Failed to create superuser (might already exist or need manual creation)" + print_color $BLUE "You can create a superuser manually with:" + print_color $YELLOW " docker compose -f docker-compose-prod.yml exec app python manage.py create_superuser" +fi + +# Final status check +print_header "Deployment Status" +print_color $BLUE "Checking container status..." 
+ +if docker compose -f docker-compose-prod.yml ps 2>/dev/null || docker-compose -f docker-compose-prod.yml ps 2>/dev/null; then + echo + print_color $GREEN "✓ All containers are running!" + echo + print_color $CYAN "You can now access:" + print_color $YELLOW "- Admin Interface: http://<server-ip>:8000/admin" + print_color $YELLOW "- API Docs: http://<server-ip>:8000/api/v1/docs" + print_color $YELLOW "- API Schema: http://<server-ip>:8000/api/v1/schema/" + echo + print_color $BLUE "To view logs, use:" + print_color $YELLOW " docker compose -f docker-compose-prod.yml logs -f [service_name]" + print_color $YELLOW " Available services: app, db, redis, celery, celery_beat" + echo + print_color $BLUE "To stop all services:" + print_color $YELLOW " docker compose -f docker-compose-prod.yml down" + echo + print_color $BLUE "Useful commands:" + print_color $YELLOW " docker compose -f docker-compose-prod.yml logs app # View Django logs" + print_color $YELLOW " docker compose -f docker-compose-prod.yml logs celery # View Celery worker logs" + print_color $YELLOW " docker compose -f docker-compose-prod.yml restart app # Restart Django service" + echo + print_color $BLUE "Management commands:" + print_color $YELLOW " docker compose -f docker-compose-prod.yml exec app python manage.py create_superuser" + print_color $YELLOW " docker compose -f docker-compose-prod.yml exec app python manage.py shell" + echo +else + print_color $RED "Error: Failed to get container status" + print_color $YELLOW "This might be a permissions issue. Check:" + print_color $YELLOW "https://docs.docker.com/engine/install/linux-postinstall/" + exit 1 +fi + +print_header "Setup Complete!" +print_color $GREEN "iNethi backend is now running successfully!" +print_color $GREEN "All services are up and database has been populated with initial data." 
From d912699f428b574a87c44c64836f37ba5ed8fbf4 Mon Sep 17 00:00:00 2001 From: Keegan White Date: Fri, 22 Aug 2025 12:41:15 +0200 Subject: [PATCH 2/7] Update flake8 configuration and refactor management commands - Increased `max-line-length` in `.flake8` from 90 to 120. - Refactored `create_radiusdesk_from_json.py` to use a `defaults` dictionary for profile creation. - Improved formatting in `create_smart_contracts_from_json.py`, `create_users_from_json.py`, and other management commands for better readability. - Added missing newlines and adjusted spacing in various files for consistency. --- inethi/.flake8 | 4 +- .../commands/create_radiusdesk_from_json.py | 31 +++++---- .../create_smart_contracts_from_json.py | 65 ++++++++++--------- .../management/commands/create_superuser.py | 11 ++-- .../commands/create_users_from_json.py | 60 ++++++++--------- .../core/management/commands/wait_for_db.py | 1 + inethi/inethi/settings.py | 26 ++++---- inethi/network/views.py | 8 +-- inethi/radiusdesk/views.py | 20 +++--- inethi/reward/tasks.py | 10 +-- inethi/smart_contracts/views.py | 9 ++- inethi/user/admin.py | 2 +- inethi/utils/crypto.py | 58 ++++++++--------- .../superuser_or_read_only_permission.py | 1 + inethi/wallet/serializers.py | 6 +- 15 files changed, 160 insertions(+), 152 deletions(-) diff --git a/inethi/.flake8 b/inethi/.flake8 index 9e9684c..641c7fe 100644 --- a/inethi/.flake8 +++ b/inethi/.flake8 @@ -1,8 +1,8 @@ [flake8] -max-line-length = 90 +max-line-length = 120 exclude = migrations, __pycache__, manage.py, - settings.py \ No newline at end of file + settings.py, \ No newline at end of file diff --git a/inethi/core/management/commands/create_radiusdesk_from_json.py b/inethi/core/management/commands/create_radiusdesk_from_json.py index 0b0a8fb..361925a 100644 --- a/inethi/core/management/commands/create_radiusdesk_from_json.py +++ b/inethi/core/management/commands/create_radiusdesk_from_json.py @@ -250,22 +250,24 @@ def handle(self, *args, **options): }) else: 
# Create profile in database + defaults = { + 'radius_desk_id': profile_radius_desk_id, + 'data_limit_enabled': profile_data.get('data_limit_enabled', False), + 'data_limit_gb': profile_data.get('data_limit_gb', 0), + 'data_limit_reset': profile_data.get('data_limit_reset', 'never'), + 'speed_limit_enabled': profile_data.get('speed_limit_enabled', False), + 'speed_limit_mbs': profile_data.get('speed_limit_mbs', 0), + 'limit_session_enabled': profile_data.get( + 'limit_session_enabled', False), + 'session_limit': profile_data.get('session_limit', 0), + 'cost': profile_data.get('cost', 0) + } profile, profile_created = RadiusDeskProfile.objects.get_or_create( name=profile_name, realm=realm, cloud=cloud, radius_desk_instance=instance, - defaults={ - 'radius_desk_id': profile_radius_desk_id, - 'data_limit_enabled': profile_data.get('data_limit_enabled', False), - 'data_limit_gb': profile_data.get('data_limit_gb', 0), - 'data_limit_reset': profile_data.get('data_limit_reset', 'never'), - 'speed_limit_enabled': profile_data.get('speed_limit_enabled', False), - 'speed_limit_mbs': profile_data.get('speed_limit_mbs', 0), - 'limit_session_enabled': profile_data.get('limit_session_enabled', False), - 'session_limit': profile_data.get('session_limit', 0), - 'cost': profile_data.get('cost', 0) - } + defaults=defaults ) if profile_created: @@ -319,14 +321,15 @@ def handle(self, *args, **options): errors.append(f'Instance {i+1}: {str(e)}') # Summary - self.stdout.write('\n' + '='*50) + self.stdout.write('\n' + '=' * 50) self.stdout.write('SUMMARY') - self.stdout.write('='*50) + self.stdout.write('=' * 50) if created_objects: self.stdout.write(f'\nCreated/Found {len(created_objects)} objects:') for obj in created_objects: - status_color = self.style.SUCCESS if obj['status'] == 'created' else self.style.WARNING + status_color = (self.style.SUCCESS if obj['status'] == 'created' + else self.style.WARNING) self.stdout.write( status_color(f" {obj['type'].title()}: {obj['name']} 
({obj['status']})") ) diff --git a/inethi/core/management/commands/create_smart_contracts_from_json.py b/inethi/core/management/commands/create_smart_contracts_from_json.py index cc1dfca..e814dc4 100644 --- a/inethi/core/management/commands/create_smart_contracts_from_json.py +++ b/inethi/core/management/commands/create_smart_contracts_from_json.py @@ -13,7 +13,7 @@ class Command(BaseCommand): """Django command to create smart contracts from JSON file.""" - + help = 'Creates smart contracts from JSON file and assigns to specified users' def add_arguments(self, parser): @@ -31,7 +31,8 @@ def add_arguments(self, parser): parser.add_argument( '--default-user', type=str, - help='Default username to assign contracts to if user_name not specified in JSON (defaults to SUPERUSER_USERNAME from env)' + help=('Default username to assign contracts to if user_name not specified in JSON ' + '(defaults to SUPERUSER_USERNAME from env)') ) def handle(self, *args, **options): @@ -39,23 +40,23 @@ def handle(self, *args, **options): json_file = options['json_file'] dry_run = options['dry_run'] default_user = options['default_user'] - + # Get default user username from env if not provided if not default_user: default_user = os.getenv('SUPERUSER_USERNAME') if not default_user: raise CommandError('SUPERUSER_USERNAME not found in environment variables.') - + # Get default user try: - default_user_obj = User.objects.get(username=default_user) + User.objects.get(username=default_user) except User.DoesNotExist: raise CommandError(f'User with username "{default_user}" does not exist.') - + # Check if file exists if not os.path.exists(json_file): raise CommandError(f'JSON file "{json_file}" does not exist.') - + # Read and parse JSON file try: with open(json_file, 'r') as f: @@ -64,34 +65,34 @@ def handle(self, *args, **options): raise CommandError(f'Invalid JSON format: {str(e)}') except Exception as e: raise CommandError(f'Error reading JSON file: {str(e)}') - + # Validate JSON structure if 
not isinstance(contracts_data, list): raise CommandError('JSON file must contain a list of contract objects.') - + if dry_run: self.stdout.write(self.style.WARNING('DRY RUN MODE - No contracts will be created')) - + created_contracts = [] errors = [] - + for i, contract_data in enumerate(contracts_data): try: # Validate required fields required_fields = ['name', 'address', 'contract_type'] missing_fields = [field for field in required_fields if field not in contract_data] - + if missing_fields: errors.append(f'Contract {i+1}: Missing required fields: {", ".join(missing_fields)}') continue - + name = contract_data['name'] address = contract_data['address'] contract_type = contract_data['contract_type'] description = contract_data.get('description', '') write_access = contract_data.get('write_access', False) read_access = contract_data.get('read_access', True) - + # Get user to assign contract to user_name = contract_data.get('user_name', default_user) try: @@ -99,20 +100,21 @@ def handle(self, *args, **options): except User.DoesNotExist: errors.append(f'Contract {i+1}: User "{user_name}" does not exist') continue - + if dry_run: self.stdout.write( - f'Would create contract: {name} ({contract_type}) at {address} assigned to user: {user_name}' + f'Would create contract: {name} ({contract_type}) at {address} ' + f'assigned to user: {user_name}' ) continue - + # Create contract in transaction with transaction.atomic(): # Check if contract already exists if SmartContract.objects.filter(address=address).exists(): errors.append(f'Contract {i+1}: Address "{address}" already exists') continue - + # Create base contract base_contract = SmartContract.objects.create( name=name, @@ -123,14 +125,14 @@ def handle(self, *args, **options): read_access=read_access, contract_type=contract_type ) - + # Create specific contract type if needed if contract_type.lower() == 'faucet': owner_address = contract_data.get('owner_address', '') if not owner_address: errors.append(f'Contract {i+1}: 
Faucet contract requires owner_address') continue - + FaucetSmartContract.objects.create( smartcontract_ptr=base_contract, owner_address=owner_address, @@ -140,13 +142,13 @@ def handle(self, *args, **options): next_time=contract_data.get('next_time', False), registry_address=contract_data.get('registry_address', '') ) - + elif contract_type.lower() == 'account_index': owner_address = contract_data.get('owner_address', '') if not owner_address: errors.append(f'Contract {i+1}: Account index contract requires owner_address') continue - + AccountsIndexContract.objects.create( smartcontract_ptr=base_contract, owner_address=owner_address, @@ -158,24 +160,25 @@ def handle(self, *args, **options): add=contract_data.get('add', False), remove=contract_data.get('remove', False) ) - + created_contracts.append({ 'name': name, 'address': address, 'contract_type': contract_type, 'user': user_name }) - + self.stdout.write( self.style.SUCCESS( - f'Created contract "{name}" ({contract_type}) at {address} assigned to user: {user_name}' + f'Created contract "{name}" ({contract_type}) at {address} ' + f'assigned to user: {user_name}' ) ) - + except Exception as e: errors.append(f'Contract {i+1}: {str(e)}') continue - + # Summary if dry_run: self.stdout.write( @@ -185,12 +188,14 @@ def handle(self, *args, **options): self.stdout.write( self.style.SUCCESS(f'Successfully created {len(created_contracts)} contracts') ) - + if created_contracts: self.stdout.write('\nCreated contracts:') for contract in created_contracts: - self.stdout.write(f' - {contract["name"]} ({contract["contract_type"]}) at {contract["address"]} assigned to {contract["user"]}') - + self.stdout.write( + f' - {contract["name"]} ({contract["contract_type"]}) at {contract["address"]} ' + f'assigned to {contract["user"]}') + if errors: self.stdout.write('\nErrors:') for error in errors: diff --git a/inethi/core/management/commands/create_superuser.py b/inethi/core/management/commands/create_superuser.py index 
aa7616b..591415f 100644 --- a/inethi/core/management/commands/create_superuser.py +++ b/inethi/core/management/commands/create_superuser.py @@ -4,14 +4,13 @@ import os from django.core.management.base import BaseCommand from django.contrib.auth import get_user_model -from django.conf import settings User = get_user_model() class Command(BaseCommand): """Django command to create a superuser using environment variables.""" - + help = 'Creates a superuser using SUPERUSER_USERNAME and SUPERUSER_PASSWORD from .env' def handle(self, *args, **options): @@ -19,26 +18,26 @@ def handle(self, *args, **options): # Get credentials from environment variables username = os.getenv('SUPERUSER_USERNAME') password = os.getenv('SUPERUSER_PASSWORD') - + if not username: self.stdout.write( self.style.ERROR('SUPERUSER_USERNAME not found in environment variables.') ) return - + if not password: self.stdout.write( self.style.ERROR('SUPERUSER_PASSWORD not found in environment variables.') ) return - + # Check if superuser already exists if User.objects.filter(username=username).exists(): self.stdout.write( self.style.WARNING(f'Superuser with username "{username}" already exists.') ) return - + # Create superuser try: user = User.objects.create_superuser( diff --git a/inethi/core/management/commands/create_users_from_json.py b/inethi/core/management/commands/create_users_from_json.py index 5ab80a3..b9e564d 100644 --- a/inethi/core/management/commands/create_users_from_json.py +++ b/inethi/core/management/commands/create_users_from_json.py @@ -18,7 +18,7 @@ class Command(BaseCommand): """Django command to create users from JSON file with wallets.""" - + help = 'Creates users from JSON file with wallets and network admin permissions' def add_arguments(self, parser): @@ -76,7 +76,7 @@ def create_wallet_for_user(self, user, wallet_name='default'): address=settings.ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS ) p_key_admin = decrypt_private_key(account_index_creator.private_key) - + # Create new 
CryptoUtils instance for registry operations registry_crypto = CryptoUtils( contract_abi_path=settings.ABI_FILE_PATH, @@ -84,20 +84,20 @@ def create_wallet_for_user(self, user, wallet_name='default'): registry=settings.FAUCET_AND_INDEX_ENABLED, faucet=settings.FAUCET_AND_INDEX_ENABLED, ) - + # Add the wallet to the account index for Krone registry_crypto.registry_add(p_key_admin, w_addr) - + # send the account gas faucet_creator = Wallet.objects.get( address=settings.FAUCET_ADMIN_WALLET_ADDRESS ) p_key_faucet = decrypt_private_key(faucet_creator.private_key) - + # Add small delay between transactions to avoid nonce conflicts import time time.sleep(1.0) - + # Create new CryptoUtils instance for faucet operations faucet_crypto = CryptoUtils( contract_abi_path=settings.ABI_FILE_PATH, @@ -113,7 +113,7 @@ def create_wallet_for_user(self, user, wallet_name='default'): # Create the wallet return Wallet.objects.create(**wallet_data) - + except Exception as e: logger.error(f"Error creating wallet for user {user.username}: {e}") return None @@ -122,11 +122,11 @@ def handle(self, *args, **options): """Entry point for Django management command.""" json_file = options['json_file'] dry_run = options['dry_run'] - + # Check if file exists if not os.path.exists(json_file): raise CommandError(f'JSON file "{json_file}" does not exist.') - + # Read and parse JSON file try: with open(json_file, 'r') as f: @@ -135,40 +135,40 @@ def handle(self, *args, **options): raise CommandError(f'Invalid JSON format: {str(e)}') except Exception as e: raise CommandError(f'Error reading JSON file: {str(e)}') - + # Validate JSON structure if not isinstance(users_data, list): raise CommandError('JSON file must contain a list of user objects.') - + if dry_run: self.stdout.write(self.style.WARNING('DRY RUN MODE - No users will be created')) - + created_users = [] errors = [] - + for i, user_data in enumerate(users_data): try: # Validate required fields (wallet_address and private_key are now optional) 
required_fields = ['username', 'password', 'email'] missing_fields = [field for field in required_fields if field not in user_data] - + if missing_fields: errors.append(f'User {i+1}: Missing required fields: {", ".join(missing_fields)}') continue - + username = user_data['username'] password = user_data['password'] email = user_data['email'] - + # Optional fields first_name = user_data.get('first_name', '') last_name = user_data.get('last_name', '') phone_number = user_data.get('phone_number', '') - + # Wallet fields (optional - will be auto-generated if not provided) wallet_address = user_data.get('wallet_address') private_key = user_data.get('private_key') - + if dry_run: if wallet_address: self.stdout.write( @@ -179,23 +179,23 @@ def handle(self, *args, **options): f'Would create user: {username} ({email}) with auto-generated wallet' ) continue - + # Create user and wallet in transaction with transaction.atomic(): # Check if user already exists if User.objects.filter(username=username).exists(): errors.append(f'User {i+1}: Username "{username}" already exists') continue - + if User.objects.filter(email=email).exists(): errors.append(f'User {i+1}: Email "{email}" already exists') continue - + # Check if wallet address already exists (only if provided) if wallet_address and Wallet.objects.filter(address=wallet_address).exists(): errors.append(f'User {i+1}: Wallet address "{wallet_address}" already exists') continue - + # Create user user = User.objects.create_user( email=email, @@ -205,14 +205,14 @@ def handle(self, *args, **options): last_name=last_name, phone_number=phone_number ) - + # Mark as network admin user.user_permissions.add( User._meta.get_field('user_permissions').related_model.objects.get( codename='network_admin' ) ) - + # Create wallet if wallet_address and private_key: # Use provided wallet details @@ -232,13 +232,13 @@ def handle(self, *args, **options): if not wallet: errors.append(f'User {i+1}: Failed to create wallet for user "{username}"') 
continue - + created_users.append({ 'username': username, 'email': email, 'wallet_address': wallet.address }) - + if wallet_address: self.stdout.write( self.style.SUCCESS( @@ -251,11 +251,11 @@ def handle(self, *args, **options): f'Created user "{username}" with auto-generated wallet {wallet.address}' ) ) - + except Exception as e: errors.append(f'User {i+1}: {str(e)}') continue - + # Summary if dry_run: self.stdout.write( @@ -265,12 +265,12 @@ def handle(self, *args, **options): self.stdout.write( self.style.SUCCESS(f'Successfully created {len(created_users)} users') ) - + if created_users: self.stdout.write('\nCreated users:') for user in created_users: self.stdout.write(f' - {user["username"]} ({user["email"]})') - + if errors: self.stdout.write('\nErrors:') for error in errors: diff --git a/inethi/core/management/commands/wait_for_db.py b/inethi/core/management/commands/wait_for_db.py index ac8dc65..f6d2318 100644 --- a/inethi/core/management/commands/wait_for_db.py +++ b/inethi/core/management/commands/wait_for_db.py @@ -11,6 +11,7 @@ class Command(BaseCommand): """Django command to pause execution until database is available.""" + def handle(self, *args, **options): """Entry point for Django management command.""" self.stdout.write('Waiting for database...') diff --git a/inethi/inethi/settings.py b/inethi/inethi/settings.py index 5fd8c5e..dbb9201 100644 --- a/inethi/inethi/settings.py +++ b/inethi/inethi/settings.py @@ -28,10 +28,10 @@ # See https://docs.djangoproject.com/en/5.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY=env("SECRET_KEY") +SECRET_KEY = env("SECRET_KEY") # SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG=env("DEBUG") +DEBUG = env("DEBUG") CORS_ALLOW_CREDENTIALS = True CORS_ALLOWED_ORIGINS = env("CORS_ALLOWED_ORIGINS") @@ -59,7 +59,7 @@ 'radiusdesk', 'network', 'api_key', - 'reward' + 'reward' ] MIDDLEWARE = [ @@ -139,7 +139,7 @@ USE_I18N = True USE_TZ = True -CELERY_BROKER_URL=env("CELERY_BROKER_URL") +CELERY_BROKER_URL = env("CELERY_BROKER_URL") # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/5.1/howto/static-files/ @@ -184,7 +184,7 @@ KRONE_CONTRACT_ABI = json.load(abi_file) WALLET_ENCRYPTION_KEY = env("WALLET_ENCRYPTION_KEY") BLOCKCHAIN_PROVIDER_URL = env("BLOCKCHAIN_PROVIDER_URL") -CONTRACT_ADDRESS=env("CONTRACT_ADDRESS") +CONTRACT_ADDRESS = env("CONTRACT_ADDRESS") SPECTACULAR_SETTINGS = { 'TITLE': 'iNethi API', @@ -198,14 +198,14 @@ FAUCET_ABI_FILE_PATH = os.path.join(BASE_DIR, "contracts/faucet_abi.json") REGISTRY_ABI_FILE_PATH = os.path.join(BASE_DIR, "contracts/registry_abi.json") REGISTRY_ADDRESS = env("REGISTRY_ADDRESS") -FAUCET_ADDRESS =env("FAUCET_ADDRESS") +FAUCET_ADDRESS = env("FAUCET_ADDRESS") # owner of smart contracts -ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS=env("ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS") -FAUCET_ADMIN_WALLET_ADDRESS=env("FAUCET_ADMIN_WALLET_ADDRESS") +ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS = env("ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS") +FAUCET_ADMIN_WALLET_ADDRESS = env("FAUCET_ADMIN_WALLET_ADDRESS") # Enable account index and faucet -FAUCET_AND_INDEX_ENABLED=env("FAUCET_AND_INDEX_ENABLED") +FAUCET_AND_INDEX_ENABLED = env("FAUCET_AND_INDEX_ENABLED") # Logging configuration # Use /tmp directory for logs in CI environment @@ -267,7 +267,7 @@ 'file': { 'class': 'logging.handlers.RotatingFileHandler', 'filename': os.path.join(BASE_DIR, 'errors.log'), - 'maxBytes': 1024*1024*50, # 50MB + 'maxBytes': 1024 * 1024 * 50, # 50MB 'backupCount': 5, 'formatter': 'verbose', 'level': 'ERROR', @@ -275,7 +275,7 @@ 'wallet_debug_file': { 'class': 'logging.handlers.RotatingFileHandler', 'filename': os.path.join(BASE_DIR, 
'debug.log'), - 'maxBytes': 1024*1024*50, # 50MB + 'maxBytes': 1024 * 1024 * 50, # 50MB 'backupCount': 5, 'formatter': 'verbose', 'level': 'INFO', @@ -283,7 +283,7 @@ 'rewards_file': { 'class': 'logging.handlers.RotatingFileHandler', 'filename': os.path.join(BASE_DIR, 'rewards.log'), - 'maxBytes': 1024*1024*50, # 50MB + 'maxBytes': 1024 * 1024 * 50, # 50MB 'backupCount': 15, 'formatter': 'verbose', 'level': 'INFO', @@ -293,4 +293,4 @@ LOGGING['loggers']['django']['handlers'] = ['console', 'file'] LOGGING['loggers']['wallet']['handlers'] = ['wallet_debug_file', 'file', 'console'] LOGGING['loggers']['utils.crypto']['handlers'] = ['wallet_debug_file', 'file', 'console'] - LOGGING['loggers']['reward']['handlers'] = ['rewards_file', 'console'] \ No newline at end of file + LOGGING['loggers']['reward']['handlers'] = ['rewards_file', 'console'] diff --git a/inethi/network/views.py b/inethi/network/views.py index a146932..87480ec 100644 --- a/inethi/network/views.py +++ b/inethi/network/views.py @@ -182,10 +182,10 @@ def aggregate_ping_view(request): valid_aggregations = { '15m': 'network_ping_aggregate_15m', '60m': 'network_ping_aggregate_60m', - '6h': 'network_ping_aggregate_6h', + '6h': 'network_ping_aggregate_6h', '12h': 'network_ping_aggregate_12h', '24h': 'network_ping_aggregate_24h', - '7d': 'network_ping_aggregate_7d', + '7d': 'network_ping_aggregate_7d', '30d': 'network_ping_aggregate_30d', '90d': 'network_ping_aggregate_90d', '365d': 'network_ping_aggregate_365d', @@ -215,7 +215,7 @@ def aggregate_ping_view(request): try: host_ids = [ int(x.strip()) for x in host_ids_param.split(',') if x.strip() - ] + ] except ValueError: return Response( {"error": "host_ids must be a comma-separated list of integers."}, @@ -335,7 +335,7 @@ def aggregate_uptime_view(request): if network_id: all_host_ids = set( Host.objects.filter(network=network).values_list('id', flat=True) - ) + ) elif host_ids: all_host_ids = set(host_ids) else: diff --git a/inethi/radiusdesk/views.py 
b/inethi/radiusdesk/views.py index 9eedbf0..4d11168 100644 --- a/inethi/radiusdesk/views.py +++ b/inethi/radiusdesk/views.py @@ -245,7 +245,7 @@ def user_vouchers(self, request): """ user = request.user vouchers = Voucher.objects.filter(user=user).order_by('-created_at') - + # Use pagination paginator = VoucherPagination() page = paginator.paginate_queryset(vouchers, request) @@ -313,7 +313,7 @@ def search_vouchers(self, request): vouchers = vouchers.filter(user__username__icontains=username) vouchers = vouchers.order_by('-created_at') - + # Use pagination paginator = VoucherPagination() page = paginator.paginate_queryset(vouchers, request) @@ -407,7 +407,7 @@ def add_voucher(self, request): voucher_codes = [voucher['name'] for voucher in voucher_response['data']] created_vouchers = [] - + if sender_address and recipient_address and amount and token: Transaction.objects.create( recipient_address=recipient_address, @@ -416,7 +416,7 @@ def add_voucher(self, request): category=category, token=token ) - + # Create a voucher record for each voucher code for voucher_code in voucher_codes: voucher_obj = Voucher.objects.create( @@ -472,7 +472,7 @@ def wallet_address_vouchers(self, request): vouchers = Voucher.objects.filter( wallet_address=wallet_address ).order_by('-created_at') - + # Use pagination paginator = VoucherPagination() page = paginator.paginate_queryset(vouchers, request) @@ -510,7 +510,7 @@ def get_all_vouchers_stats(self, request): "error": "Missing required parameters: " "radius_desk_instance_pk and radius_desk_cloud_pk" - }, + }, status=status.HTTP_400_BAD_REQUEST ) @@ -522,7 +522,7 @@ def get_all_vouchers_stats(self, request): and not request.user.is_superuser): if not radius_desk_instance_db.administrators.filter( pk=request.user.pk - ).exists(): + ).exists(): return Response( {"error": "Unauthorized for this RadiusDeskInstance."}, status=status.HTTP_403_FORBIDDEN @@ -601,7 +601,7 @@ def get_all_vouchers_stats_db(self, request): "error": "Missing 
required parameters: " "radius_desk_instance_pk and radius_desk_cloud_pk" - }, + }, status=status.HTTP_400_BAD_REQUEST ) @@ -613,7 +613,7 @@ def get_all_vouchers_stats_db(self, request): and not request.user.is_superuser): if not radius_desk_instance_db.administrators.filter( pk=request.user.pk - ).exists(): + ).exists(): return Response( {"error": "Unauthorized for this RadiusDeskInstance."}, status=status.HTTP_403_FORBIDDEN @@ -624,7 +624,7 @@ def get_all_vouchers_stats_db(self, request): radius_desk_instance=radius_desk_instance_pk, cloud=radius_desk_cloud_pk ).order_by('-created_at') - + # Use pagination paginator = VoucherPagination() page = paginator.paginate_queryset(vouchers, request) diff --git a/inethi/reward/tasks.py b/inethi/reward/tasks.py index c8689bc..64b064b 100644 --- a/inethi/reward/tasks.py +++ b/inethi/reward/tasks.py @@ -93,9 +93,9 @@ def process_reward(reward_id): # Send tokens try: tx_receipt = crypto_utils.send_to_wallet_address( - admin_wallet.address, - admin_private_key, - device_wallet.address, + admin_wallet.address, + admin_private_key, + device_wallet.address, awarded_amount ) tx_hash = None @@ -103,8 +103,8 @@ def process_reward(reward_id): 'transactionHash' in tx_receipt and hasattr( tx_receipt['transactionHash'], 'hex' - ) - ): + ) + ): tx_hash = tx_receipt['transactionHash'].hex() logger.info( f"Blockchain transaction created for reward {reward_id}: " diff --git a/inethi/smart_contracts/views.py b/inethi/smart_contracts/views.py index 8a1ec4f..5b61df7 100644 --- a/inethi/smart_contracts/views.py +++ b/inethi/smart_contracts/views.py @@ -157,7 +157,7 @@ def registry_add(self, request, pk=None): { 'contract_type': 'You cannot be added to this contract type.' 
- }, + }, status=status.HTTP_400_BAD_REQUEST ) @@ -416,7 +416,6 @@ def faucet_gimme(self, request, pk=None): if wallet_exists: try: wallet = Wallet.objects.get(user=request.user) - address = wallet.address decrypted_private_key = decrypt_private_key( wallet.private_key @@ -429,10 +428,10 @@ def faucet_gimme(self, request, pk=None): registry=settings.FAUCET_AND_INDEX_ENABLED, faucet=settings.FAUCET_AND_INDEX_ENABLED, ) - + # Call faucet gimme (new method returns transaction receipt) receipt = crypto_utils.faucet_gimme(decrypted_private_key, wallet.address) - + # For now, return success since the new method doesn't return detailed info # You may want to add additional checks here if needed return Response( @@ -447,7 +446,7 @@ def faucet_gimme(self, request, pk=None): except Exception as e: return Response( { - 'error': f'Failed to call gimme: {e}.' + 'error': f'Failed to call gimme: {e}.' }, status=status.HTTP_500_INTERNAL_SERVER_ERROR ) diff --git a/inethi/user/admin.py b/inethi/user/admin.py index a4e11e9..6af52da 100644 --- a/inethi/user/admin.py +++ b/inethi/user/admin.py @@ -1,3 +1,3 @@ -from django.contrib import admin # noqa +from django.contrib import admin # noqa # Register your models here. 
diff --git a/inethi/utils/crypto.py b/inethi/utils/crypto.py index 6647cff..16cbcd6 100644 --- a/inethi/utils/crypto.py +++ b/inethi/utils/crypto.py @@ -66,7 +66,7 @@ def __init__( ): self.w3 = Web3(Web3.HTTPProvider(settings.BLOCKCHAIN_PROVIDER_URL)) self.contract = load_contract(contract_abi_path, contract_address) - + if registry: self.registry = load_contract( settings.REGISTRY_ABI_FILE_PATH, @@ -74,7 +74,7 @@ def __init__( ) else: self.registry = None - + if faucet: self.faucet = load_contract( settings.FAUCET_ABI_FILE_PATH, @@ -151,10 +151,10 @@ def send_to_wallet_address( # Calculate token amount adjusted for decimals decimals = self.contract.functions.decimals().call() token_amount = int(amount * (10**decimals)) - + attempt = 0 logger.info(f"send_to_wallet_address: {to_address} from {from_address} for {amount}") - + while attempt < max_retries: try: # Estimate gas and get current gas price @@ -170,13 +170,13 @@ def send_to_wallet_address( f"transfering to {to_address} from {from_address} for {token_amount} " f"with gas {gas} and gas_price {gas_price}" ) - + # Get nonce if not provided if nonce is None: nonce_to_use = self.w3.eth.get_transaction_count(Web3.to_checksum_address(from_address)) else: nonce_to_use = nonce - + # Prepare and sign the transaction transfer = self.contract.functions.transfer( Web3.to_checksum_address(to_address), token_amount @@ -196,7 +196,7 @@ def send_to_wallet_address( f"send_to_wallet_address: {to_address}, nonce {nonce} failed." 
) raise Exception("Transaction failed") - + except Exception as e: logger.error(f"send_to_wallet_address attempt {attempt+1} failed: {e}") # Check for nonce error @@ -208,7 +208,7 @@ def send_to_wallet_address( attempt += 1 continue raise - + raise Exception("send_to_wallet_address failed after retries") # ---------------- Faucet ---------------- # @@ -226,12 +226,12 @@ def faucet_give_to( """ if not self.faucet: raise Exception("Faucet contract not loaded") - + account = self.w3.eth.account.from_key(private_key) sender_address = account.address attempt = 0 logger.info(f"faucet_give_to: {give_to_address} with nonce {nonce}") - + while attempt < max_retries: try: if nonce is None: @@ -240,14 +240,14 @@ def faucet_give_to( ) else: nonce_to_use = nonce - + gas_price = self.w3.eth.gas_price gas_estimate = self.faucet.functions.giveTo( Web3.to_checksum_address(give_to_address) ).estimate_gas({ 'from': Web3.to_checksum_address(sender_address) }) - + tx = self.faucet.functions.giveTo( Web3.to_checksum_address(give_to_address) ).build_transaction( @@ -259,7 +259,7 @@ def faucet_give_to( 'chainId': self.w3.eth.chain_id, } ) - + receipt = self.complete_transaction(private_key, tx) if receipt: return receipt @@ -268,7 +268,7 @@ def faucet_give_to( f"faucet_give_to: {give_to_address}, nonce {nonce} failed." 
) raise Exception("Transaction failed") - + except Exception as e: logger.error(f"faucet_give_to attempt {attempt+1} failed: {e}") # Check for nonce error @@ -282,14 +282,14 @@ def faucet_give_to( attempt += 1 continue raise - + raise Exception("faucet_give_to failed after retries") def faucet_gimme(self, private_key: str, address: str) -> dict: """Call the gimme function for an account from the faucet""" if not self.faucet: raise Exception("Faucet contract not loaded") - + raw_balance = self.w3.eth.get_balance(Web3.to_checksum_address(address)) balance = convert_wei_to_celo(raw_balance) @@ -326,7 +326,7 @@ def faucet_gimme(self, private_key: str, address: str) -> dict: gas_estimate = self.faucet.functions.gimme().estimate_gas({ 'from': Web3.to_checksum_address(address), }) - + tx = self.faucet.functions.gimme().build_transaction({ 'from': Web3.to_checksum_address(address), 'nonce': nonce, @@ -359,7 +359,7 @@ def faucet_gimme(self, private_key: str, address: str) -> dict: } except Exception as e: print(f'Error processing events: {e}') - + return { 'balance': balance, 'threshold': faucet_thresh, @@ -373,7 +373,7 @@ def faucet_check_time(self, address_to_check: str) -> dict: """Check if an address can receive funds at this time""" if not self.faucet: raise Exception("Faucet contract not loaded") - + next_time = self.faucet.functions.nextTime( _subject=address_to_check ).call({'from': address_to_check}) @@ -391,7 +391,7 @@ def faucet_balance_threshold(self, address: str) -> float: """Check what the threshold amount is for a faucet""" if not self.faucet: raise Exception("Faucet contract not loaded") - + try: balance_threshold = self.faucet.functions.nextBalance( _subject=Web3.to_checksum_address(address) @@ -413,11 +413,11 @@ def registry_add( """ if self.registry is None: raise Exception("Registry contract not loaded.") - + account = self.w3.eth.account.from_key(private_key) sender_address = account.address attempt = 0 - + while attempt < max_retries: try: if nonce is 
None: @@ -426,18 +426,18 @@ def registry_add( ) else: nonce_to_use = nonce - + gas_price = self.w3.eth.gas_price gas_estimate = self.registry.functions.add( Web3.to_checksum_address(address_to_add) ).estimate_gas({ 'from': Web3.to_checksum_address(sender_address) }) - + logger.info( f"registry_add: {address_to_add}, gas {gas_estimate}" ) - + tx = self.registry.functions.add( Web3.to_checksum_address(address_to_add) ).build_transaction( @@ -449,13 +449,13 @@ def registry_add( 'chainId': self.w3.eth.chain_id, } ) - + receipt = self.complete_transaction(private_key, tx) if receipt: return receipt else: raise Exception("Transaction failed") - + except Exception as e: logger.error(f"registry_add attempt {attempt+1} failed: {e}") # Check for nonce error @@ -469,14 +469,14 @@ def registry_add( attempt += 1 continue raise - + raise Exception("registry_add failed after retries") def account_index_check_active(self, address_to_check: str) -> bool: """Check if an address is active on the account index.""" if self.registry is None: raise Exception("Registry contract not loaded.") - + active = self.registry.functions.isActive( Web3.to_checksum_address(address_to_check) ).call() diff --git a/inethi/utils/superuser_or_read_only_permission.py b/inethi/utils/superuser_or_read_only_permission.py index dcbda89..eb590d5 100644 --- a/inethi/utils/superuser_or_read_only_permission.py +++ b/inethi/utils/superuser_or_read_only_permission.py @@ -8,6 +8,7 @@ class IsSuperUserOrReadOnly(BasePermission): """ # Allow create, update, and delete for superusers only # Allow read-only methods for all authenticated users + def has_permission(self, request, view): if request.method in SAFE_METHODS: return request.user.is_authenticated diff --git a/inethi/wallet/serializers.py b/inethi/wallet/serializers.py index c8899e4..91d6dc3 100644 --- a/inethi/wallet/serializers.py +++ b/inethi/wallet/serializers.py @@ -65,7 +65,7 @@ def create(self, validated_data): 
address=settings.ACCOUNT_INDEX_ADMIN_WALLET_ADDRESS ) p_key_admin = decrypt_private_key(account_index_creator.private_key) - + # Create new CryptoUtils instance for registry operations registry_crypto = CryptoUtils( contract_abi_path=settings.ABI_FILE_PATH, @@ -74,13 +74,13 @@ def create(self, validated_data): faucet=settings.FAUCET_AND_INDEX_ENABLED, ) registry_crypto.registry_add(p_key_admin, w_addr) - + # send the account gas faucet_creator = Wallet.objects.get( # type: ignore[attr-defined] address=settings.FAUCET_ADMIN_WALLET_ADDRESS ) p_key_faucet = decrypt_private_key(faucet_creator.private_key) - + # Create new CryptoUtils instance for faucet operations faucet_crypto = CryptoUtils( contract_abi_path=settings.ABI_FILE_PATH, From 4f4c8bbb5d5b3d60abbbec62737ee9ea48347cf9 Mon Sep 17 00:00:00 2001 From: Keegan White Date: Fri, 22 Aug 2025 13:05:14 +0200 Subject: [PATCH 3/7] Refactor Docker Compose for Development Environment - Updated container names and volume names in `docker-compose-dev.yml` to include `-dev` suffix for clarity. - Removed obsolete migration files from various apps, including `api_key`, `core`, `network`, and `radiusdesk`, to clean up the codebase. - Ensured consistency in migration management by deleting unnecessary migration files that are no longer in use. 
--- docker-compose-dev.yml | 20 +- .../migrations/0002_alter_apikey_key.py | 18 -- inethi/core/migrations/0002_smartcontract.py | 28 -- ...excontract_faucetsmartcontract_and_more.py | 58 ---- .../0004_faucetsmartcontract_owner_address.py | 19 -- inethi/core/migrations/0005_service.py | 22 -- .../migrations/0006_service_description.py | 18 -- inethi/core/migrations/0007_service_url.py | 19 -- .../core/migrations/0008_user_phone_number.py | 18 -- inethi/core/migrations/0009_transaction.py | 33 -- ...options_transaction_block_hash_and_more.py | 28 -- .../0011_alter_transaction_sender.py | 20 -- ...nder_address_transaction_token_and_more.py | 48 --- .../migrations/0013_alter_user_options.py | 17 - .../migrations/0002_alter_ping_primary_key.py | 26 -- .../migrations/0003_make_ping_hypertable.py | 25 -- .../0004_create_continuous_aggregates_all.py | 243 --------------- .../migrations/0005_host_device_type.py | 18 -- .../0006_network_host_network_ping_network.py | 35 --- ...address_alter_host_mac_address_and_more.py | 28 -- ...work_name_alter_network_unique_together.py | 24 -- .../0009_alter_host_unique_together.py | 17 - .../0002_alter_radiusdeskinstance_token.py | 18 -- .../0003_rename_realm_realm_cloud.py | 18 -- ...ucher_wallet_address_alter_voucher_user.py | 26 -- .../0005_radiusdeskinstance_administrators.py | 20 -- .../0006_radiusdeskinstance_accepts_crypto.py | 18 -- .../migrations/0007_radiusdeskprofile_cost.py | 18 -- .../0002_reward_interval_minutes.py | 18 -- ...03_alter_uptimerewardtransaction_reward.py | 19 -- .../reward/migrations/0004_reward_network.py | 20 -- .../transaction/tests/test_transaction_api.py | 291 ------------------ 32 files changed, 10 insertions(+), 1238 deletions(-) delete mode 100644 inethi/api_key/migrations/0002_alter_apikey_key.py delete mode 100644 inethi/core/migrations/0002_smartcontract.py delete mode 100644 inethi/core/migrations/0003_accountsindexcontract_faucetsmartcontract_and_more.py delete mode 100644 
inethi/core/migrations/0004_faucetsmartcontract_owner_address.py delete mode 100644 inethi/core/migrations/0005_service.py delete mode 100644 inethi/core/migrations/0006_service_description.py delete mode 100644 inethi/core/migrations/0007_service_url.py delete mode 100644 inethi/core/migrations/0008_user_phone_number.py delete mode 100644 inethi/core/migrations/0009_transaction.py delete mode 100644 inethi/core/migrations/0010_alter_transaction_options_transaction_block_hash_and_more.py delete mode 100644 inethi/core/migrations/0011_alter_transaction_sender.py delete mode 100644 inethi/core/migrations/0012_transaction_sender_address_transaction_token_and_more.py delete mode 100644 inethi/core/migrations/0013_alter_user_options.py delete mode 100644 inethi/network/migrations/0002_alter_ping_primary_key.py delete mode 100644 inethi/network/migrations/0003_make_ping_hypertable.py delete mode 100644 inethi/network/migrations/0004_create_continuous_aggregates_all.py delete mode 100644 inethi/network/migrations/0005_host_device_type.py delete mode 100644 inethi/network/migrations/0006_network_host_network_ping_network.py delete mode 100644 inethi/network/migrations/0007_alter_host_ip_address_alter_host_mac_address_and_more.py delete mode 100644 inethi/network/migrations/0008_alter_network_name_alter_network_unique_together.py delete mode 100644 inethi/network/migrations/0009_alter_host_unique_together.py delete mode 100644 inethi/radiusdesk/migrations/0002_alter_radiusdeskinstance_token.py delete mode 100644 inethi/radiusdesk/migrations/0003_rename_realm_realm_cloud.py delete mode 100644 inethi/radiusdesk/migrations/0004_voucher_wallet_address_alter_voucher_user.py delete mode 100644 inethi/radiusdesk/migrations/0005_radiusdeskinstance_administrators.py delete mode 100644 inethi/radiusdesk/migrations/0006_radiusdeskinstance_accepts_crypto.py delete mode 100644 inethi/radiusdesk/migrations/0007_radiusdeskprofile_cost.py delete mode 100644 
inethi/reward/migrations/0002_reward_interval_minutes.py delete mode 100644 inethi/reward/migrations/0003_alter_uptimerewardtransaction_reward.py delete mode 100644 inethi/reward/migrations/0004_reward_network.py delete mode 100644 inethi/transaction/tests/test_transaction_api.py diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 6c1ee47..f4bb2a8 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -1,6 +1,6 @@ services: app: - container_name: inethi-backend + container_name: inethi-backend-dev restart: no env_file: - ./.env @@ -30,11 +30,11 @@ services: db: image: timescale/timescaledb:latest-pg17 restart: no - container_name: inethi-backend-db-timescale + container_name: inethi-backend-db-timescale-dev env_file: - ./.env volumes: - - dev-db-data-timescale:/var/lib/postgresql/data + - dev-db-data-timescale-dev:/var/lib/postgresql/data environment: - POSTGRES_DB=${DB_NAME:-devdb} - POSTGRES_USER=${DB_USER:-devuser} @@ -44,7 +44,7 @@ services: keycloak: image: quay.io/keycloak/keycloak:25.0.6 restart: no - container_name: inethi-keycloak + container_name: inethi-keycloak-dev env_file: - ./.env ports: @@ -52,7 +52,7 @@ services: command: - start-dev volumes: - - dev-keycloak-data:/opt/keycloak + - dev-keycloak-data-dev:/opt/keycloak environment: - KEYCLOAK_ADMIN=${KEYCLOAK_MASTER_ADMIN:-devuser} - KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_MASTER_ADMIN_PASSWORD:-devpass} @@ -60,7 +60,7 @@ services: redis: image: redis:alpine restart: no - container_name: inethi-backend-celery-redis + container_name: inethi-backend-celery-redis-dev celery: build: @@ -74,7 +74,7 @@ services: environment: - CELERY_BROKER=redis://redis:6379/0 - CELERY_BACKEND=redis://redis:6379/0 - container_name: inethi-backend-celery + container_name: inethi-backend-celery-dev depends_on: - redis - db @@ -90,11 +90,11 @@ services: environment: - CELERY_BROKER=redis://redis:6379/0 - CELERY_BACKEND=redis://redis:6379/0 - container_name: inethi-backend-celery-beat + container_name: 
inethi-backend-celery-beat-dev depends_on: - redis - db volumes: - dev-db-data-timescale: - dev-keycloak-data: + dev-db-data-timescale-dev: + dev-keycloak-data-dev: diff --git a/inethi/api_key/migrations/0002_alter_apikey_key.py b/inethi/api_key/migrations/0002_alter_apikey_key.py deleted file mode 100644 index bedecad..0000000 --- a/inethi/api_key/migrations/0002_alter_apikey_key.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1 on 2025-02-03 09:55 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('api_key', '0001_initial'), - ] - - operations = [ - migrations.AlterField( - model_name='apikey', - name='key', - field=models.CharField(blank=True, max_length=255, unique=True), - ), - ] diff --git a/inethi/core/migrations/0002_smartcontract.py b/inethi/core/migrations/0002_smartcontract.py deleted file mode 100644 index 0b7716f..0000000 --- a/inethi/core/migrations/0002_smartcontract.py +++ /dev/null @@ -1,28 +0,0 @@ -# Generated by Django 5.1 on 2024-10-08 14:12 - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0001_initial'), - ] - - operations = [ - migrations.CreateModel( - name='SmartContract', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255)), - ('address', models.CharField(max_length=255, unique=True)), - ('description', models.TextField(blank=True)), - ('write_access', models.BooleanField(default=False)), - ('read_access', models.BooleanField(default=False)), - ('contract_type', models.CharField(choices=[('accounts index', 'Accounts Index'), ('eth faucet', 'Eth Faucet'), ('other', 'Other')], default='other', max_length=50)), - ('user', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), - ], 
- ), - ] diff --git a/inethi/core/migrations/0003_accountsindexcontract_faucetsmartcontract_and_more.py b/inethi/core/migrations/0003_accountsindexcontract_faucetsmartcontract_and_more.py deleted file mode 100644 index 5df9ef2..0000000 --- a/inethi/core/migrations/0003_accountsindexcontract_faucetsmartcontract_and_more.py +++ /dev/null @@ -1,58 +0,0 @@ -# Generated by Django 5.1 on 2024-10-09 11:25 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0002_smartcontract'), - ] - - operations = [ - migrations.CreateModel( - name='AccountsIndexContract', - fields=[ - ('smartcontract_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.smartcontract')), - ('owner_address', models.CharField(max_length=255)), - ('entry', models.BooleanField(default=False)), - ('entry_count', models.BooleanField(default=False)), - ('is_active', models.BooleanField(default=False)), - ('activate', models.BooleanField(default=False)), - ('deactivate', models.BooleanField(default=False)), - ('add', models.BooleanField(default=False)), - ('remove', models.BooleanField(default=False)), - ], - options={ - 'verbose_name': 'Account Index Smart Contract', - 'verbose_name_plural': 'Account Index Smart Contracts', - }, - bases=('core.smartcontract',), - ), - migrations.CreateModel( - name='FaucetSmartContract', - fields=[ - ('smartcontract_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.smartcontract')), - ('gimme', models.BooleanField(default=False)), - ('give_to', models.BooleanField(default=False)), - ('next_balance', models.BooleanField(default=False)), - ('next_time', models.BooleanField(default=False)), - ('registry_address', models.CharField(blank=True, max_length=255)), - ], - options={ - 
'verbose_name': 'Faucet Smart Contract', - 'verbose_name_plural': 'Faucet Smart Contracts', - }, - bases=('core.smartcontract',), - ), - migrations.AlterModelOptions( - name='smartcontract', - options={'verbose_name': 'Smart Contract', 'verbose_name_plural': 'Smart Contracts'}, - ), - migrations.AlterField( - model_name='smartcontract', - name='contract_type', - field=models.CharField(max_length=255), - ), - ] diff --git a/inethi/core/migrations/0004_faucetsmartcontract_owner_address.py b/inethi/core/migrations/0004_faucetsmartcontract_owner_address.py deleted file mode 100644 index ba439be..0000000 --- a/inethi/core/migrations/0004_faucetsmartcontract_owner_address.py +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by Django 5.1 on 2024-10-10 10:09 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0003_accountsindexcontract_faucetsmartcontract_and_more'), - ] - - operations = [ - migrations.AddField( - model_name='faucetsmartcontract', - name='owner_address', - field=models.CharField(default='0xB821E49ADB53F0AbeD834278d5dFc57901c30Eea', max_length=255), - preserve_default=False, - ), - ] diff --git a/inethi/core/migrations/0005_service.py b/inethi/core/migrations/0005_service.py deleted file mode 100644 index 724009b..0000000 --- a/inethi/core/migrations/0005_service.py +++ /dev/null @@ -1,22 +0,0 @@ -# Generated by Django 5.1 on 2024-11-15 09:43 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0004_faucetsmartcontract_owner_address'), - ] - - operations = [ - migrations.CreateModel( - name='Service', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255)), - ('type', models.CharField(choices=[('entertainment', 'Entertainment'), ('learning', 'Learning'), ('utility', 'Utility')], default='utility', max_length=50)), - ('paid', 
models.BooleanField(default=False)), - ], - ), - ] diff --git a/inethi/core/migrations/0006_service_description.py b/inethi/core/migrations/0006_service_description.py deleted file mode 100644 index 7b22249..0000000 --- a/inethi/core/migrations/0006_service_description.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1 on 2024-11-15 11:43 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0005_service'), - ] - - operations = [ - migrations.AddField( - model_name='service', - name='description', - field=models.TextField(blank=True), - ), - ] diff --git a/inethi/core/migrations/0007_service_url.py b/inethi/core/migrations/0007_service_url.py deleted file mode 100644 index bfadf34..0000000 --- a/inethi/core/migrations/0007_service_url.py +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by Django 5.1 on 2024-11-15 11:50 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0006_service_description'), - ] - - operations = [ - migrations.AddField( - model_name='service', - name='url', - field=models.URLField(default='http://google.com', unique=True), - preserve_default=False, - ), - ] diff --git a/inethi/core/migrations/0008_user_phone_number.py b/inethi/core/migrations/0008_user_phone_number.py deleted file mode 100644 index 8051af4..0000000 --- a/inethi/core/migrations/0008_user_phone_number.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1 on 2024-12-05 09:25 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0007_service_url'), - ] - - operations = [ - migrations.AddField( - model_name='user', - name='phone_number', - field=models.CharField(blank=True, max_length=15, null=True), - ), - ] diff --git a/inethi/core/migrations/0009_transaction.py b/inethi/core/migrations/0009_transaction.py deleted file mode 100644 index 20fb39f..0000000 --- 
a/inethi/core/migrations/0009_transaction.py +++ /dev/null @@ -1,33 +0,0 @@ -# Generated by Django 5.1 on 2024-12-05 12:12 - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0008_user_phone_number'), - ] - - operations = [ - migrations.CreateModel( - name='Transaction', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('recipient_address', models.CharField(max_length=256)), - ('amount', models.DecimalField(decimal_places=8, max_digits=18)), - ('transaction_hash', models.CharField(max_length=256)), - ('block_number', models.IntegerField()), - ('gas_used', models.DecimalField(decimal_places=8, max_digits=18)), - ('category', models.CharField(choices=[('TRANSFER', 'Transfer'), ('REWARD', 'Reward'), ('PAYMENT', 'Payment'), ('OTHER', 'Other')], default='TRANSFER', max_length=50)), - ('timestamp', models.DateTimeField(auto_now_add=True)), - ('recipient', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='received_transactions', to=settings.AUTH_USER_MODEL)), - ('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sent_transactions', to=settings.AUTH_USER_MODEL)), - ], - options={ - 'permissions': [('view_all_transactions', 'Can view all transactions')], - }, - ), - ] diff --git a/inethi/core/migrations/0010_alter_transaction_options_transaction_block_hash_and_more.py b/inethi/core/migrations/0010_alter_transaction_options_transaction_block_hash_and_more.py deleted file mode 100644 index cc6ee71..0000000 --- a/inethi/core/migrations/0010_alter_transaction_options_transaction_block_hash_and_more.py +++ /dev/null @@ -1,28 +0,0 @@ -# Generated by Django 5.1 on 2024-12-06 13:59 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - 
('core', '0009_transaction'), - ] - - operations = [ - migrations.AlterModelOptions( - name='transaction', - options={}, - ), - migrations.AddField( - model_name='transaction', - name='block_hash', - field=models.CharField(default='0x00', max_length=256), - preserve_default=False, - ), - migrations.AlterField( - model_name='transaction', - name='block_number', - field=models.CharField(max_length=256), - ), - ] diff --git a/inethi/core/migrations/0011_alter_transaction_sender.py b/inethi/core/migrations/0011_alter_transaction_sender.py deleted file mode 100644 index ddddd8a..0000000 --- a/inethi/core/migrations/0011_alter_transaction_sender.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 5.1 on 2025-02-03 10:25 - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0010_alter_transaction_options_transaction_block_hash_and_more'), - ] - - operations = [ - migrations.AlterField( - model_name='transaction', - name='sender', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sent_transactions', to=settings.AUTH_USER_MODEL), - ), - ] diff --git a/inethi/core/migrations/0012_transaction_sender_address_transaction_token_and_more.py b/inethi/core/migrations/0012_transaction_sender_address_transaction_token_and_more.py deleted file mode 100644 index c21c736..0000000 --- a/inethi/core/migrations/0012_transaction_sender_address_transaction_token_and_more.py +++ /dev/null @@ -1,48 +0,0 @@ -# Generated by Django 5.1 on 2025-02-03 10:31 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0011_alter_transaction_sender'), - ] - - operations = [ - migrations.AddField( - model_name='transaction', - name='sender_address', - field=models.CharField(blank=True, max_length=256, null=True), - ), - migrations.AddField( - 
model_name='transaction', - name='token', - field=models.CharField(blank=True, max_length=255, null=True), - ), - migrations.AlterField( - model_name='transaction', - name='block_hash', - field=models.CharField(blank=True, max_length=256, null=True), - ), - migrations.AlterField( - model_name='transaction', - name='block_number', - field=models.CharField(blank=True, max_length=256, null=True), - ), - migrations.AlterField( - model_name='transaction', - name='category', - field=models.CharField(choices=[('TRANSFER', 'Transfer'), ('INTERNET_COUPON', 'Internet Coupon'), ('REWARD', 'Reward'), ('PAYMENT', 'Payment'), ('OTHER', 'Other')], default='TRANSFER', max_length=50), - ), - migrations.AlterField( - model_name='transaction', - name='gas_used', - field=models.DecimalField(blank=True, decimal_places=8, max_digits=18, null=True), - ), - migrations.AlterField( - model_name='transaction', - name='transaction_hash', - field=models.CharField(blank=True, max_length=256, null=True), - ), - ] diff --git a/inethi/core/migrations/0013_alter_user_options.py b/inethi/core/migrations/0013_alter_user_options.py deleted file mode 100644 index 9fbfc66..0000000 --- a/inethi/core/migrations/0013_alter_user_options.py +++ /dev/null @@ -1,17 +0,0 @@ -# Generated by Django 5.1 on 2025-03-19 15:17 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('core', '0012_transaction_sender_address_transaction_token_and_more'), - ] - - operations = [ - migrations.AlterModelOptions( - name='user', - options={'permissions': (('network_admin', 'Can administer the network'),)}, - ), - ] diff --git a/inethi/network/migrations/0002_alter_ping_primary_key.py b/inethi/network/migrations/0002_alter_ping_primary_key.py deleted file mode 100644 index ea96cd2..0000000 --- a/inethi/network/migrations/0002_alter_ping_primary_key.py +++ /dev/null @@ -1,26 +0,0 @@ -# 0002_alter_ping_primary_key.py - -from django.db import migrations - -class 
Migration(migrations.Migration): - atomic = False # Altering primary keys typically cannot run inside a transaction - - dependencies = [ - ('network', '0001_initial'), - ] - - operations = [ - migrations.RunSQL( - sql=""" - -- Drop the automatically created primary key constraint. - ALTER TABLE network_ping DROP CONSTRAINT network_ping_pkey; - -- Create a composite primary key including the partitioning column (timestamp) and the id. - ALTER TABLE network_ping ADD PRIMARY KEY (timestamp, id); - """, - reverse_sql=""" - -- Reverse: Drop the composite primary key and restore the original primary key on id. - ALTER TABLE network_ping DROP CONSTRAINT network_ping_pkey; - ALTER TABLE network_ping ADD PRIMARY KEY (id); - """, - ), - ] \ No newline at end of file diff --git a/inethi/network/migrations/0003_make_ping_hypertable.py b/inethi/network/migrations/0003_make_ping_hypertable.py deleted file mode 100644 index 6423385..0000000 --- a/inethi/network/migrations/0003_make_ping_hypertable.py +++ /dev/null @@ -1,25 +0,0 @@ -# 0003_make_ping_hypertable.py - -from django.db import migrations - -class Migration(migrations.Migration): - atomic = False # The hypertable conversion must run outside of a transaction. 
- - dependencies = [ - ('network', '0002_alter_ping_primary_key'), - ] - - operations = [ - migrations.RunSQL( - sql=""" - SELECT create_hypertable( - 'network_ping', - 'timestamp', - if_not_exists => TRUE - ); - """, - reverse_sql=""" - SELECT drop_hypertable('network_ping'::regclass); - """, - ), - ] \ No newline at end of file diff --git a/inethi/network/migrations/0004_create_continuous_aggregates_all.py b/inethi/network/migrations/0004_create_continuous_aggregates_all.py deleted file mode 100644 index 55c840a..0000000 --- a/inethi/network/migrations/0004_create_continuous_aggregates_all.py +++ /dev/null @@ -1,243 +0,0 @@ -from django.db import migrations - -class Migration(migrations.Migration): - atomic = False - dependencies = [ - ('network', '0003_make_ping_hypertable'), - ] - - operations = [ - # 15-minute aggregate (unchanged) - migrations.RunSQL( - sql=""" - CREATE MATERIALIZED VIEW network_ping_aggregate_15m - WITH (timescaledb.continuous) AS - SELECT - time_bucket('15 minutes', timestamp) AS bucket, - host_id, - AVG(CASE WHEN is_alive THEN 1.0 ELSE 0.0 END) * 100 AS uptime_percentage, - COUNT(*) AS total_pings - FROM network_ping - GROUP BY bucket, host_id; - """, - reverse_sql="DROP MATERIALIZED VIEW IF EXISTS network_ping_aggregate_15m;" - ), - migrations.RunSQL( - sql=""" - SELECT add_continuous_aggregate_policy('network_ping_aggregate_15m', - start_offset => INTERVAL '1 hour', - end_offset => INTERVAL '1 minute', - schedule_interval => INTERVAL '1 minute' - ); - """, - reverse_sql="SELECT remove_continuous_aggregate_policy('network_ping_aggregate_15m');" - ), - - # 60-minute aggregate - migrations.RunSQL( - sql=""" - CREATE MATERIALIZED VIEW network_ping_aggregate_60m - WITH (timescaledb.continuous) AS - SELECT - time_bucket('60 minutes', timestamp) AS bucket, - host_id, - AVG(CASE WHEN is_alive THEN 1.0 ELSE 0.0 END) * 100 AS uptime_percentage, - COUNT(*) AS total_pings - FROM network_ping - GROUP BY bucket, host_id; - """, - reverse_sql="DROP 
MATERIALIZED VIEW IF EXISTS network_ping_aggregate_60m;" - ), - migrations.RunSQL( - sql=""" - SELECT add_continuous_aggregate_policy('network_ping_aggregate_60m', - start_offset => INTERVAL '3 hours', - end_offset => INTERVAL '1 minute', - schedule_interval => INTERVAL '1 minute' - ); - """, - reverse_sql="SELECT remove_continuous_aggregate_policy('network_ping_aggregate_60m');" - ), - - # 6-hour aggregate - migrations.RunSQL( - sql=""" - CREATE MATERIALIZED VIEW network_ping_aggregate_6h - WITH (timescaledb.continuous) AS - SELECT - time_bucket('6 hours', timestamp) AS bucket, - host_id, - AVG(CASE WHEN is_alive THEN 1.0 ELSE 0.0 END) * 100 AS uptime_percentage, - COUNT(*) AS total_pings - FROM network_ping - GROUP BY bucket, host_id; - """, - reverse_sql="DROP MATERIALIZED VIEW IF EXISTS network_ping_aggregate_6h;" - ), - migrations.RunSQL( - sql=""" - SELECT add_continuous_aggregate_policy('network_ping_aggregate_6h', - start_offset => INTERVAL '13 hours', - end_offset => INTERVAL '10 minutes', - schedule_interval => INTERVAL '30 minutes' - ); - """, - reverse_sql="SELECT remove_continuous_aggregate_policy('network_ping_aggregate_6h');" - ), - - # 12-hour aggregate - migrations.RunSQL( - sql=""" - CREATE MATERIALIZED VIEW network_ping_aggregate_12h - WITH (timescaledb.continuous) AS - SELECT - time_bucket('12 hours', timestamp) AS bucket, - host_id, - AVG(CASE WHEN is_alive THEN 1.0 ELSE 0.0 END) * 100 AS uptime_percentage, - COUNT(*) AS total_pings - FROM network_ping - GROUP BY bucket, host_id; - """, - reverse_sql="DROP MATERIALIZED VIEW IF EXISTS network_ping_aggregate_12h;" - ), - migrations.RunSQL( - sql=""" - SELECT add_continuous_aggregate_policy('network_ping_aggregate_12h', - start_offset => INTERVAL '25 hours', - end_offset => INTERVAL '10 minutes', - schedule_interval => INTERVAL '1 hour' - ); - """, - reverse_sql="SELECT remove_continuous_aggregate_policy('network_ping_aggregate_12h');" - ), - - # 24-hour aggregate (1 day) - migrations.RunSQL( - 
sql=""" - CREATE MATERIALIZED VIEW network_ping_aggregate_24h - WITH (timescaledb.continuous) AS - SELECT - time_bucket('1 day', timestamp) AS bucket, - host_id, - AVG(CASE WHEN is_alive THEN 1.0 ELSE 0.0 END) * 100 AS uptime_percentage, - COUNT(*) AS total_pings - FROM network_ping - GROUP BY bucket, host_id; - """, - reverse_sql="DROP MATERIALIZED VIEW IF EXISTS network_ping_aggregate_24h;" - ), - migrations.RunSQL( - sql=""" - SELECT add_continuous_aggregate_policy('network_ping_aggregate_24h', - start_offset => INTERVAL '3 days', - end_offset => INTERVAL '1 hour', - schedule_interval => INTERVAL '2 hours' - ); - """, - reverse_sql="SELECT remove_continuous_aggregate_policy('network_ping_aggregate_24h');" - ), - - # 7-day aggregate - migrations.RunSQL( - sql=""" - CREATE MATERIALIZED VIEW network_ping_aggregate_7d - WITH (timescaledb.continuous) AS - SELECT - time_bucket('7 days', timestamp) AS bucket, - host_id, - AVG(CASE WHEN is_alive THEN 1.0 ELSE 0.0 END) * 100 AS uptime_percentage, - COUNT(*) AS total_pings - FROM network_ping - GROUP BY bucket, host_id; - """, - reverse_sql="DROP MATERIALIZED VIEW IF EXISTS network_ping_aggregate_7d;" - ), - migrations.RunSQL( - sql=""" - SELECT add_continuous_aggregate_policy('network_ping_aggregate_7d', - start_offset => INTERVAL '15 days', - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '6 hours' - ); - """, - reverse_sql="SELECT remove_continuous_aggregate_policy('network_ping_aggregate_7d');" - ), - - # 30-day aggregate (1 month) - migrations.RunSQL( - sql=""" - CREATE MATERIALIZED VIEW network_ping_aggregate_30d - WITH (timescaledb.continuous) AS - SELECT - time_bucket('30 days', timestamp) AS bucket, - host_id, - AVG(CASE WHEN is_alive THEN 1.0 ELSE 0.0 END) * 100 AS uptime_percentage, - COUNT(*) AS total_pings - FROM network_ping - GROUP BY bucket, host_id; - """, - reverse_sql="DROP MATERIALIZED VIEW IF EXISTS network_ping_aggregate_30d;" - ), - migrations.RunSQL( - sql=""" - SELECT 
add_continuous_aggregate_policy('network_ping_aggregate_30d', - start_offset => INTERVAL '61 days', - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '12 hours' - ); - """, - reverse_sql="SELECT remove_continuous_aggregate_policy('network_ping_aggregate_30d');" - ), - - # 90-day aggregate - migrations.RunSQL( - sql=""" - CREATE MATERIALIZED VIEW network_ping_aggregate_90d - WITH (timescaledb.continuous) AS - SELECT - time_bucket('90 days', timestamp) AS bucket, - host_id, - AVG(CASE WHEN is_alive THEN 1.0 ELSE 0.0 END) * 100 AS uptime_percentage, - COUNT(*) AS total_pings - FROM network_ping - GROUP BY bucket, host_id; - """, - reverse_sql="DROP MATERIALIZED VIEW IF EXISTS network_ping_aggregate_90d;" - ), - migrations.RunSQL( - sql=""" - SELECT add_continuous_aggregate_policy('network_ping_aggregate_90d', - start_offset => INTERVAL '181 days', - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '1 day' - ); - """, - reverse_sql="SELECT remove_continuous_aggregate_policy('network_ping_aggregate_90d');" - ), - - # 365-day aggregate - migrations.RunSQL( - sql=""" - CREATE MATERIALIZED VIEW network_ping_aggregate_365d - WITH (timescaledb.continuous) AS - SELECT - time_bucket('365 days', timestamp) AS bucket, - host_id, - AVG(CASE WHEN is_alive THEN 1.0 ELSE 0.0 END) * 100 AS uptime_percentage, - COUNT(*) AS total_pings - FROM network_ping - GROUP BY bucket, host_id; - """, - reverse_sql="DROP MATERIALIZED VIEW IF EXISTS network_ping_aggregate_365d;" - ), - migrations.RunSQL( - sql=""" - SELECT add_continuous_aggregate_policy('network_ping_aggregate_365d', - start_offset => INTERVAL '731 days', - end_offset => INTERVAL '1 day', - schedule_interval => INTERVAL '7 days' - ); - """, - reverse_sql="SELECT remove_continuous_aggregate_policy('network_ping_aggregate_365d');" - ), - ] diff --git a/inethi/network/migrations/0005_host_device_type.py b/inethi/network/migrations/0005_host_device_type.py deleted file mode 100644 index bf5442b..0000000 
--- a/inethi/network/migrations/0005_host_device_type.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1 on 2025-02-11 07:39 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('network', '0004_create_continuous_aggregates_all'), - ] - - operations = [ - migrations.AddField( - model_name='host', - name='device_type', - field=models.CharField(choices=[('unknown', 'Unknown'), ('dns_server', 'DNS Server'), ('server', 'Server'), ('firewall', 'Firewall'), ('access_point', 'Access Point'), ('switch', 'Switch')], default='unknown', help_text='Select the type of device (DNS Server, Server, Firewall, Access Point, Switch)', max_length=20), - ), - ] diff --git a/inethi/network/migrations/0006_network_host_network_ping_network.py b/inethi/network/migrations/0006_network_host_network_ping_network.py deleted file mode 100644 index 817c138..0000000 --- a/inethi/network/migrations/0006_network_host_network_ping_network.py +++ /dev/null @@ -1,35 +0,0 @@ -# Generated by Django 5.1 on 2025-03-20 10:43 - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('network', '0005_host_device_type'), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name='Network', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, unique=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('admin', models.ForeignKey(help_text='User who administrates this network', on_delete=django.db.models.deletion.CASCADE, related_name='networks', to=settings.AUTH_USER_MODEL)), - ], - ), - migrations.AddField( - model_name='host', - name='network', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, 
related_name='hosts', to='network.network'), - ), - migrations.AddField( - model_name='ping', - name='network', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='pings', to='network.network'), - ), - ] diff --git a/inethi/network/migrations/0007_alter_host_ip_address_alter_host_mac_address_and_more.py b/inethi/network/migrations/0007_alter_host_ip_address_alter_host_mac_address_and_more.py deleted file mode 100644 index ad3809f..0000000 --- a/inethi/network/migrations/0007_alter_host_ip_address_alter_host_mac_address_and_more.py +++ /dev/null @@ -1,28 +0,0 @@ -# Generated by Django 5.1 on 2025-03-21 10:54 - -import django.core.validators -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('network', '0006_network_host_network_ping_network'), - ] - - operations = [ - migrations.AlterField( - model_name='host', - name='ip_address', - field=models.GenericIPAddressField(), - ), - migrations.AlterField( - model_name='host', - name='mac_address', - field=models.CharField(blank=True, max_length=17, null=True, validators=[django.core.validators.RegexValidator(message='Enter a valid MAC address in format XX:XX:XX:XX:XX:XX.', regex='^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$')]), - ), - migrations.AlterUniqueTogether( - name='host', - unique_together={('network', 'ip_address'), ('network', 'mac_address')}, - ), - ] diff --git a/inethi/network/migrations/0008_alter_network_name_alter_network_unique_together.py b/inethi/network/migrations/0008_alter_network_name_alter_network_unique_together.py deleted file mode 100644 index cd98398..0000000 --- a/inethi/network/migrations/0008_alter_network_name_alter_network_unique_together.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by Django 5.1 on 2025-03-21 12:43 - -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('network', 
'0007_alter_host_ip_address_alter_host_mac_address_and_more'), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.AlterField( - model_name='network', - name='name', - field=models.CharField(max_length=255), - ), - migrations.AlterUniqueTogether( - name='network', - unique_together={('name', 'admin')}, - ), - ] diff --git a/inethi/network/migrations/0009_alter_host_unique_together.py b/inethi/network/migrations/0009_alter_host_unique_together.py deleted file mode 100644 index 3e80a5c..0000000 --- a/inethi/network/migrations/0009_alter_host_unique_together.py +++ /dev/null @@ -1,17 +0,0 @@ -# Generated by Django 5.1 on 2025-03-21 12:59 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('network', '0008_alter_network_name_alter_network_unique_together'), - ] - - operations = [ - migrations.AlterUniqueTogether( - name='host', - unique_together={('network', 'ip_address')}, - ), - ] diff --git a/inethi/radiusdesk/migrations/0002_alter_radiusdeskinstance_token.py b/inethi/radiusdesk/migrations/0002_alter_radiusdeskinstance_token.py deleted file mode 100644 index 3e287fb..0000000 --- a/inethi/radiusdesk/migrations/0002_alter_radiusdeskinstance_token.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1 on 2025-01-23 18:46 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('radiusdesk', '0001_initial'), - ] - - operations = [ - migrations.AlterField( - model_name='radiusdeskinstance', - name='token', - field=models.CharField(blank=True, default='', max_length=255), - ), - ] diff --git a/inethi/radiusdesk/migrations/0003_rename_realm_realm_cloud.py b/inethi/radiusdesk/migrations/0003_rename_realm_realm_cloud.py deleted file mode 100644 index 42ab58d..0000000 --- a/inethi/radiusdesk/migrations/0003_rename_realm_realm_cloud.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1 on 2025-01-24 10:59 - -from 
django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('radiusdesk', '0002_alter_radiusdeskinstance_token'), - ] - - operations = [ - migrations.RenameField( - model_name='realm', - old_name='realm', - new_name='cloud', - ), - ] diff --git a/inethi/radiusdesk/migrations/0004_voucher_wallet_address_alter_voucher_user.py b/inethi/radiusdesk/migrations/0004_voucher_wallet_address_alter_voucher_user.py deleted file mode 100644 index 2832afe..0000000 --- a/inethi/radiusdesk/migrations/0004_voucher_wallet_address_alter_voucher_user.py +++ /dev/null @@ -1,26 +0,0 @@ -# Generated by Django 5.1 on 2025-02-03 10:31 - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('radiusdesk', '0003_rename_realm_realm_cloud'), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.AddField( - model_name='voucher', - name='wallet_address', - field=models.CharField(blank=True, max_length=255, null=True), - ), - migrations.AlterField( - model_name='voucher', - name='user', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='vouchers', to=settings.AUTH_USER_MODEL), - ), - ] diff --git a/inethi/radiusdesk/migrations/0005_radiusdeskinstance_administrators.py b/inethi/radiusdesk/migrations/0005_radiusdeskinstance_administrators.py deleted file mode 100644 index 1a2b16e..0000000 --- a/inethi/radiusdesk/migrations/0005_radiusdeskinstance_administrators.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 5.1 on 2025-03-19 15:55 - -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('radiusdesk', '0004_voucher_wallet_address_alter_voucher_user'), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - 
migrations.AddField( - model_name='radiusdeskinstance', - name='administrators', - field=models.ManyToManyField(blank=True, help_text='Users who have network administrator rights for this instance', related_name='admin_radiusdesk_instances', to=settings.AUTH_USER_MODEL), - ), - ] diff --git a/inethi/radiusdesk/migrations/0006_radiusdeskinstance_accepts_crypto.py b/inethi/radiusdesk/migrations/0006_radiusdeskinstance_accepts_crypto.py deleted file mode 100644 index 9c773ba..0000000 --- a/inethi/radiusdesk/migrations/0006_radiusdeskinstance_accepts_crypto.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1 on 2025-03-22 12:18 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('radiusdesk', '0005_radiusdeskinstance_administrators'), - ] - - operations = [ - migrations.AddField( - model_name='radiusdeskinstance', - name='accepts_crypto', - field=models.BooleanField(default=False), - ), - ] diff --git a/inethi/radiusdesk/migrations/0007_radiusdeskprofile_cost.py b/inethi/radiusdesk/migrations/0007_radiusdeskprofile_cost.py deleted file mode 100644 index c90a721..0000000 --- a/inethi/radiusdesk/migrations/0007_radiusdeskprofile_cost.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1 on 2025-03-22 12:22 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('radiusdesk', '0006_radiusdeskinstance_accepts_crypto'), - ] - - operations = [ - migrations.AddField( - model_name='radiusdeskprofile', - name='cost', - field=models.FloatField(default=0), - ), - ] diff --git a/inethi/reward/migrations/0002_reward_interval_minutes.py b/inethi/reward/migrations/0002_reward_interval_minutes.py deleted file mode 100644 index 73d3bc6..0000000 --- a/inethi/reward/migrations/0002_reward_interval_minutes.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 5.1 on 2025-02-24 12:13 - -from django.db import migrations, models - - -class 
Migration(migrations.Migration): - - dependencies = [ - ('reward', '0001_initial'), - ] - - operations = [ - migrations.AddField( - model_name='reward', - name='interval_minutes', - field=models.IntegerField(blank=True, help_text='Interval for recurring rewards in minutes', null=True), - ), - ] diff --git a/inethi/reward/migrations/0003_alter_uptimerewardtransaction_reward.py b/inethi/reward/migrations/0003_alter_uptimerewardtransaction_reward.py deleted file mode 100644 index 1f2e9d2..0000000 --- a/inethi/reward/migrations/0003_alter_uptimerewardtransaction_reward.py +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by Django 5.1 on 2025-02-24 14:29 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('reward', '0002_reward_interval_minutes'), - ] - - operations = [ - migrations.AlterField( - model_name='uptimerewardtransaction', - name='reward', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reward_transactions', to='reward.reward'), - ), - ] diff --git a/inethi/reward/migrations/0004_reward_network.py b/inethi/reward/migrations/0004_reward_network.py deleted file mode 100644 index f41dc22..0000000 --- a/inethi/reward/migrations/0004_reward_network.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 5.1 on 2025-03-20 11:05 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('network', '0006_network_host_network_ping_network'), - ('reward', '0003_alter_uptimerewardtransaction_reward'), - ] - - operations = [ - migrations.AddField( - model_name='reward', - name='network', - field=models.ForeignKey(blank=True, help_text='Network associated with this reward', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rewards', to='network.network'), - ), - ] diff --git a/inethi/transaction/tests/test_transaction_api.py 
b/inethi/transaction/tests/test_transaction_api.py deleted file mode 100644 index 8f85b56..0000000 --- a/inethi/transaction/tests/test_transaction_api.py +++ /dev/null @@ -1,291 +0,0 @@ -from unittest.mock import patch -from django.contrib.auth import get_user_model -from django.urls import reverse -from django.test import TestCase -from rest_framework.test import APIClient -from rest_framework import status - -from core.models import ( - Transaction -) - -# Standard list view -TRANSACTION_LIST_URL = reverse( - 'transaction:transaction-list' -) # Output: /api/v1/transactions/ - -# Custom action -TRANSACTION_BY_USER_URL = reverse( - 'transaction:transaction-list-by-user' -) # Output: /api/v1/transactions/by-user/ - - -def detail_url(transaction_id): - """Return smart contract detail URL""" - return reverse( - 'transaction:transaction-detail', - args=[transaction_id] - ) - - -def create_user(**params): - """Helper function to create a user""" - return get_user_model().objects.create_user(**params) - - -class PublicSmartContractApiTests(TestCase): - """Test unauthenticated smart contract API access""" - - def setUp(self): - self.client = APIClient() - - def test_auth_required(self): - """ - Test that authentication is required - for accessing smart contracts - """ - res = self.client.get(TRANSACTION_LIST_URL) - self.assertEqual(res.status_code, status.HTTP_403_FORBIDDEN) - res = self.client.get(TRANSACTION_BY_USER_URL) - self.assertEqual(res.status_code, status.HTTP_403_FORBIDDEN) - - -class PrivateAPITests(TestCase): - """Test the private API""" - def setUp(self): - self.client = APIClient() - - patcher = patch('inethi.settings.KEYCLOAK_ADMIN.create_user') - self.mock_keycloak_create_user = patcher.start() - self.mock_keycloak_create_user.return_value = 'mock-keycloak-user-id' - self.user = create_user( - email='test@example.com', - password='testpass123', - username='testuser', - ) - self.client.force_authenticate(self.user) - self.addCleanup(patcher.stop) - - def 
test_list_transactions(self): - """Test listing transactions for a user""" - other_user_1 = create_user( - email='other_1@example.com', - password='password123', - username='other_1_username', - first_name='other_1 First Name', - last_name='other_1 Last Name', - ) - - other_user_2 = create_user( - email='other_2@example.com', - password='password123', - username='other_2_username', - first_name='other_2 First Name', - last_name='other_2 Last Name', - ) - - transaction_user = Transaction.objects.create( - sender=self.user, - recipient=other_user_1, - - recipient_address='mock address', - amount='10.1', - transaction_hash='mock hash', - block_number=123, - gas_used='10.2', - category='Transfer' - ) - Transaction.objects.create( - sender=other_user_2, - recipient=other_user_1, - - recipient_address='mock address 2', - amount='10.1', - transaction_hash='mock hash 2', - block_number=123, - gas_used='10.2', - category='Transfer' - ) - url = TRANSACTION_BY_USER_URL - response = self.client.get(url) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual( - len(response.data), - 1 - ) # Only one transaction should be visible - - # Assert that other_user_2 is not in the returned data - for transaction in response.data: - self.assertNotEqual(transaction['sender'], other_user_2.id) - self.assertNotEqual(transaction['recipient'], other_user_2.id) - - # Assert that the returned transaction matches the expected one - self.assertEqual(response.data[0]['id'], transaction_user.id) - self.assertEqual( - response.data[0]['recipient_address'], - transaction_user.recipient_address - ) - - def test_list_transactions_empty(self): - """ - Test that no transactions are returned for - a user with no transactions - """ - url = TRANSACTION_LIST_URL - other_user_1 = create_user( - email='other_1@example.com', - password='password123', - username='other_1_username', - first_name='other_1 First Name', - last_name='other_1 Last Name', - ) - - other_user_2 = create_user( - 
email='other_2@example.com', - password='password123', - username='other_2_username', - first_name='other_2 First Name', - last_name='other_2 Last Name', - ) - - Transaction.objects.create( - sender=other_user_2, - recipient=other_user_1, - - recipient_address='mock address 2', - amount='10.1', - transaction_hash='mock hash 2', - block_number=123, - gas_used='10.2', - category='Transfer' - ) - - response = self.client.get(url) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 0) - - def test_list_transactions_multiple(self): - """Test listing multiple transactions involving a user""" - other_user = create_user( - email='other@example.com', - password='password123', - username='other_username', - ) - - transaction_1 = Transaction.objects.create( - sender=self.user, - recipient=other_user, - recipient_address='mock address 1', - amount='5.0', - transaction_hash='mock hash 1', - block_number=101, - gas_used='1.0', - category='Transfer' - ) - transaction_2 = Transaction.objects.create( - sender=other_user, - recipient=self.user, - recipient_address='mock address 2', - amount='7.0', - transaction_hash='mock hash 2', - block_number=102, - gas_used='2.0', - category='Transfer' - ) - - url = TRANSACTION_BY_USER_URL - response = self.client.get(url) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 2) - - transaction_ids = [t['id'] for t in response.data] - self.assertIn(transaction_1.id, transaction_ids) - self.assertIn(transaction_2.id, transaction_ids) - - def test_transaction_detail_unauthorized(self): - """Test that unauthorized access to a transaction is forbidden""" - other_user = create_user( - email='other@example.com', - password='password123', - username='other_username', - ) - - transaction = Transaction.objects.create( - sender=other_user, - recipient=other_user, - recipient_address='mock address', - amount='10.0', - transaction_hash='mock hash', - 
block_number=101, - gas_used='1.0', - category='Transfer' - ) - - url = reverse('transaction:transaction-detail', args=[transaction.id]) - response = self.client.get(url) - - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - - -class AdminTransactionAPITests(TestCase): - def setUp(self): - self.client = APIClient() - - patcher = patch('inethi.settings.KEYCLOAK_ADMIN.create_user') - self.mock_keycloak_create_user = patcher.start() - self.mock_keycloak_create_user.return_value = 'mock-keycloak-user-id' - self.user = create_user( - email='test@example.com', - password='testpass123', - username='testuser', - is_staff=True, - is_superuser=True, - ) - self.client.force_authenticate(self.user) - self.addCleanup(patcher.stop) - - def test_list_transactions(self): - """Test listing transactions for admin user returns all transactions""" - other_user_1 = create_user( - email='other_1@example.com', - password='password123', - username='other_1_username', - first_name='other_1 First Name', - last_name='other_1 Last Name', - ) - - other_user_2 = create_user( - email='other_2@example.com', - password='password123', - username='other_2_username', - first_name='other_2 First Name', - last_name='other_2 Last Name', - ) - - Transaction.objects.create( - sender=self.user, - recipient=other_user_1, - - recipient_address='mock address', - amount='10.1', - transaction_hash='mock hash', - block_number=123, - gas_used='10.2', - category='Transfer' - ) - Transaction.objects.create( - sender=other_user_2, - recipient=other_user_1, - - recipient_address='mock address 2', - amount='10.1', - transaction_hash='mock hash 2', - block_number=123, - gas_used='10.2', - category='Transfer' - ) - url = TRANSACTION_BY_USER_URL - response = self.client.get(url) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data), 2) From 1e03323020d4736f6242f22f85a7e359f5bbc099 Mon Sep 17 00:00:00 2001 From: Keegan White Date: Fri, 22 Aug 2025 15:55:08 
+0200 Subject: [PATCH 4/7] Add key generation utility script - Introduced `generate_keys.py` to generate Django secret key and encryption key for Inethi backend setup. - The script provides usage instructions and security notes for handling the generated keys. - Includes error handling and troubleshooting guidance for missing dependencies. --- generate_keys.py | 73 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100755 generate_keys.py diff --git a/generate_keys.py b/generate_keys.py new file mode 100755 index 0000000..cae4782 --- /dev/null +++ b/generate_keys.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python3 +""" +Utility script to generate Django secret key and encryption key for Inethi backend setup. +Run this script to get the keys needed for your .env file. +""" + +import sys +from cryptography.fernet import Fernet + +def generate_django_secret_key(): + """Generate a Django secret key.""" + try: + from django.core.management.utils import get_random_secret_key + return get_random_secret_key() + except ImportError: + print("Warning: Django not available, generating a random key instead.") + import secrets + import string + chars = string.ascii_letters + string.digits + "!@#$%^&*(-_=+)" + return ''.join(secrets.choice(chars) for _ in range(50)) + +def generate_encryption_key(): + """Generate an encryption key for Fernet.""" + return Fernet.generate_key().decode() + +def main(): + print("=" * 60) + print("🔑 Inethi Backend Key Generator") + print("=" * 60) + print() + + try: + # Generate Django secret key + print("📝 Generating Django Secret Key...") + django_key = generate_django_secret_key() + print(f"✅ Django Secret Key:") + print(f" {django_key}") + print() + + # Generate encryption key + print("🔐 Generating Encryption Key...") + encryption_key = generate_encryption_key() + print(f"✅ Encryption Key:") + print(f" {encryption_key}") + print() + + # Display usage instructions + print("=" * 60) + print("📋 Usage Instructions:") 
+ print("=" * 60) + print() + print("Add these keys to your .env file:") + print() + print(f"DJANGO_SECRET_KEY={django_key}") + print(f"ENCRYPTION_KEY={encryption_key}") + print() + print("⚠️ Important Security Notes:") + print(" • Keep these keys secure and never commit them to version control") + print(" • Use different keys for development and production") + print(" • Store production keys securely (e.g., environment variables)") + print() + print("🎉 Keys generated successfully!") + + except Exception as e: + print(f"❌ Error generating keys: {e}") + print() + print("💡 Troubleshooting:") + print(" • Make sure you have the required packages installed:") + print(" pip install cryptography django") + sys.exit(1) + +if __name__ == "__main__": + main() From 3444621f192dd1f6479daeec1799630093aae2e9 Mon Sep 17 00:00:00 2001 From: Keegan White Date: Fri, 22 Aug 2025 16:31:43 +0200 Subject: [PATCH 5/7] Update .env.example and users.json with Traefik settings and user details - Added Traefik configuration variables to .env.example for backend setup. - Enhanced users.json by including first and last names for network administrators to improve user data completeness. 
--- .env.example | 8 +++++++- inethi/users.json | 4 ++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.env.example b/.env.example index 0ad5075..24aba5b 100644 --- a/.env.example +++ b/.env.example @@ -43,4 +43,10 @@ FAUCET_AND_INDEX_ENABLED="False" # this enables the calls to account index and F RADIUS_DESK_USERNAME= RADIUS_DESK_PASSWORD= # Celery config -CELERY_BROKER_URL="redis://redis:6379/0" \ No newline at end of file +CELERY_BROKER_URL="redis://redis:6379/0" +# =============== +# Traefik Configuration +TRAEFIK_BACKEND_HOST="backend.inethicloud.net" +TRAEFIK_ENTRYPOINTS="websecure" +TRAEFIK_CERTRESOLVER="letsencrypt" +TRAEFIK_NETWORK_BRIDGE="inethi-bridge-traefik" \ No newline at end of file diff --git a/inethi/users.json b/inethi/users.json index acbe81a..e0a39ac 100644 --- a/inethi/users.json +++ b/inethi/users.json @@ -2,11 +2,15 @@ { "username": "network_admin_1", "password": "password1", + "first_name": "Network", + "last_name": "Admin1", "email": "network_admin_1@inethi.com" }, { "username": "network_admin_2", "password": "password2", + "first_name": "Network", + "last_name": "Admin2", "email": "network_admin_2@inethi.com" } ] From 5a5d36bdb490df2f97e16bec4b7b41976fc8887c Mon Sep 17 00:00:00 2001 From: Keegan White Date: Fri, 22 Aug 2025 17:01:31 +0200 Subject: [PATCH 6/7] Update .env.example and setup.sh for user creation and configuration - Added SUPERUSER_USERNAME and SUPERUSER_PASSWORD placeholders to .env.example for superuser setup. - Simplified user and smart contract creation commands in setup.sh by removing unnecessary path prefixes for JSON files. 
--- .env.example | 5 ++++- setup.sh | 16 ++++++++-------- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/.env.example b/.env.example index 24aba5b..c30b50a 100644 --- a/.env.example +++ b/.env.example @@ -49,4 +49,7 @@ CELERY_BROKER_URL="redis://redis:6379/0" TRAEFIK_BACKEND_HOST="backend.inethicloud.net" TRAEFIK_ENTRYPOINTS="websecure" TRAEFIK_CERTRESOLVER="letsencrypt" -TRAEFIK_NETWORK_BRIDGE="inethi-bridge-traefik" \ No newline at end of file +TRAEFIK_NETWORK_BRIDGE="inethi-bridge-traefik" + +SUPERUSER_USERNAME= +SUPERUSER_PASSWORD= \ No newline at end of file diff --git a/setup.sh b/setup.sh index dad1a2c..a53189c 100755 --- a/setup.sh +++ b/setup.sh @@ -343,8 +343,8 @@ print_color $BLUE "Running management commands to populate the database..." # Create users from JSON print_color $BLUE "Creating users from users.json..." -if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_users_from_json inethi/users.json 2>/dev/null || \ - docker compose -f docker-compose-prod.yml exec -T app python manage.py create_users_from_json inethi/users.json 2>/dev/null; then +if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_users_from_json users.json 2>/dev/null || \ + docker compose -f docker-compose-prod.yml exec -T app python manage.py create_users_from_json users.json 2>/dev/null; then print_color $GREEN "✓ Users created successfully" else print_color $YELLOW "Warning: Failed to create users (they might already exist)" @@ -352,8 +352,8 @@ fi # Create smart contracts from JSON print_color $BLUE "Creating smart contracts from smart_contracts.json..." 
-if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_smart_contracts_from_json inethi/smart_contracts.json 2>/dev/null || \ - docker compose -f docker-compose-prod.yml exec -T app python manage.py create_smart_contracts_from_json inethi/smart_contracts.json 2>/dev/null; then +if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_smart_contracts_from_json smart_contracts.json 2>/dev/null || \ + docker compose -f docker-compose-prod.yml exec -T app python manage.py create_smart_contracts_from_json smart_contracts.json 2>/dev/null; then print_color $GREEN "✓ Smart contracts created successfully" else print_color $YELLOW "Warning: Failed to create smart contracts (they might already exist)" @@ -361,8 +361,8 @@ fi # Create RADIUSdesk instances from JSON print_color $BLUE "Creating RADIUSdesk instances from sample_radiusdesk_config.json..." -if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_radiusdesk_from_json inethi/sample_radiusdesk_config.json 2>/dev/null || \ - docker compose -f docker-compose-prod.yml exec -T app python manage.py create_radiusdesk_from_json inethi/sample_radiusdesk_config.json 2>/dev/null; then +if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_radiusdesk_from_json sample_radiusdesk_config.json 2>/dev/null || \ + docker compose -f docker-compose-prod.yml exec -T app python manage.py create_radiusdesk_from_json sample_radiusdesk_config.json 2>/dev/null; then print_color $GREEN "✓ RADIUSdesk instances created successfully" else print_color $YELLOW "Warning: Failed to create RADIUSdesk instances (they might already exist)" @@ -370,8 +370,8 @@ fi # Create superuser print_color $BLUE "Creating superuser..." 
-if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser --noinput 2>/dev/null || \ - docker compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser --noinput 2>/dev/null; then +if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser 2>/dev/null || \ + docker compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser 2>/dev/null; then print_color $GREEN "✓ Superuser created successfully" else print_color $YELLOW "Warning: Failed to create superuser (might already exist or need manual creation)" From a7fc422c8769999a75e3e7ffc707fdd103119ee2 Mon Sep 17 00:00:00 2001 From: Keegan White Date: Fri, 22 Aug 2025 17:25:36 +0200 Subject: [PATCH 7/7] fixed ordering of managmeent commands --- setup.sh | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/setup.sh b/setup.sh index a53189c..147f8dd 100755 --- a/setup.sh +++ b/setup.sh @@ -341,6 +341,17 @@ fi print_header "Populating Database" print_color $BLUE "Running management commands to populate the database..." +# Create superuser +print_color $BLUE "Creating superuser..." +if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser 2>/dev/null || \ + docker compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser 2>/dev/null; then + print_color $GREEN "✓ Superuser created successfully" +else + print_color $YELLOW "Warning: Failed to create superuser (might already exist or need manual creation)" + print_color $BLUE "You can create a superuser manually with:" + print_color $YELLOW " docker compose -f docker-compose-prod.yml exec app python manage.py create_superuser" +fi + # Create users from JSON print_color $BLUE "Creating users from users.json..." 
if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_users_from_json users.json 2>/dev/null || \ @@ -368,16 +379,7 @@ else print_color $YELLOW "Warning: Failed to create RADIUSdesk instances (they might already exist)" fi -# Create superuser -print_color $BLUE "Creating superuser..." -if docker compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser 2>/dev/null || \ - docker compose -f docker-compose-prod.yml exec -T app python manage.py create_superuser 2>/dev/null; then - print_color $GREEN "✓ Superuser created successfully" -else - print_color $YELLOW "Warning: Failed to create superuser (might already exist or need manual creation)" - print_color $BLUE "You can create a superuser manually with:" - print_color $YELLOW " docker compose -f docker-compose-prod.yml exec app python manage.py create_superuser" -fi + # Final status check print_header "Deployment Status"