diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..26d3352
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,3 @@
+# Default ignored files
+/shelf/
+/workspace.xml
diff --git a/.idea/discord-bot.iml b/.idea/discord-bot.iml
new file mode 100644
index 0000000..0c7e49e
--- /dev/null
+++ b/.idea/discord-bot.iml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="PYTHON_MODULE" version="4">
+  <component name="NewModuleRootManager">
+    <content url="file://$MODULE_DIR$" />
+    <orderEntry type="jdk" jdkName="Python 3.10" jdkType="Python SDK" />
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 0000000..7d23c29
--- /dev/null
+++ b/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,32 @@
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000..105ce2d
--- /dev/null
+++ b/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+<component name="InspectionProjectProfileManager">
+  <settings>
+    <option name="USE_PROJECT_PROFILE" value="false" />
+    <version value="1.0" />
+  </settings>
+</component>
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..ef7b832
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10" project-jdk-type="Python SDK" />
+</project>
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..e201780
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="ProjectModuleManager">
+    <modules>
+      <module fileurl="file://$PROJECT_DIR$/.idea/discord-bot.iml" filepath="$PROJECT_DIR$/.idea/discord-bot.iml" />
+    </modules>
+  </component>
+</project>
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000..f2c6230
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="VcsDirectoryMappings">
+    <mapping directory="" vcs="Git" />
+  </component>
+</project>
\ No newline at end of file
diff --git a/cogs/badges.py b/cogs/badges.py
index 43f6f97..037bd6a 100644
--- a/cogs/badges.py
+++ b/cogs/badges.py
@@ -3,7 +3,7 @@
import discord
from discord.ext import commands
-from helpers.supabaseClient import PostgresClient
+from shared_migrations.db.discord_bot import DiscordBotQueries
class BadgeModal(discord.ui.Modal, title="Your Badges"):
@@ -17,7 +17,7 @@ async def on_timeout(self, interaction):
class BadgeContents:
def __init__(self, name) -> None:
- self.postgres_client = PostgresClient()
+ self.postgres_client = DiscordBotQueries()
apprentinceDesc = f"""Welcome *{name}*!!
diff --git a/cogs/discordDataScraper.py b/cogs/discordDataScraper.py
index 6a1eb0c..13a5c4a 100644
--- a/cogs/discordDataScraper.py
+++ b/cogs/discordDataScraper.py
@@ -9,7 +9,7 @@
from discord.channel import TextChannel
from discord.ext import commands, tasks
-from helpers.supabaseClient import PostgresClient
+from shared_migrations.db.discord_bot import DiscordBotQueries
with open("config.json") as config_file:
config_data = json.load(config_file)
@@ -24,7 +24,7 @@
class DiscordDataScaper(commands.Cog):
def __init__(self, bot) -> None:
self.bot = bot
- self.postgres_client = PostgresClient()
+ self.postgres_client = DiscordBotQueries()
@commands.Cog.listener()
async def on_message(self, message):
diff --git a/cogs/listeners/member_events_cog.py b/cogs/listeners/member_events_cog.py
index ebb6937..5045552 100644
--- a/cogs/listeners/member_events_cog.py
+++ b/cogs/listeners/member_events_cog.py
@@ -1,13 +1,14 @@
import discord
from discord.ext import commands
-from helpers.supabaseClient import PostgresClient
+# from helpers.supabaseClient import PostgresClient
+from shared_migrations.db.discord_bot import DiscordBotQueries
class MemberEventsListener(commands.Cog):
def __init__(self, bot) -> None:
self.bot = bot
- self.postgres_client = PostgresClient()
+ self.postgres_client = DiscordBotQueries()
super().__init__()
@commands.Cog.listener("on_member_join")
diff --git a/cogs/listeners/message_events_cog.py b/cogs/listeners/message_events_cog.py
index 5c8047d..2f223c0 100644
--- a/cogs/listeners/message_events_cog.py
+++ b/cogs/listeners/message_events_cog.py
@@ -2,10 +2,11 @@
from discord.ext import commands
from config.server import ServerConfig
-from helpers.supabaseClient import PostgresClient
+# from helpers.supabaseClient import PostgresClient
+from shared_migrations.db.discord_bot import DiscordBotQueries
serverConfig = ServerConfig()
-postgresClient = PostgresClient()
+postgresClient = DiscordBotQueries()
async def grantVerifiedRole(member: discord.Member):
diff --git a/cogs/listeners/role_events_cog.py b/cogs/listeners/role_events_cog.py
index a497947..e4d47f3 100644
--- a/cogs/listeners/role_events_cog.py
+++ b/cogs/listeners/role_events_cog.py
@@ -1,13 +1,14 @@
import discord
from discord.ext import commands
-from helpers.supabaseClient import PostgresClient
+# from helpers.supabaseClient import PostgresClient
+from shared_migrations.db.discord_bot import DiscordBotQueries
class RoleEventsListener(commands.Cog):
def __init__(self, bot) -> None:
self.bot = bot
- self.postgres_client = PostgresClient()
+ self.postgres_client = DiscordBotQueries()
super().__init__()
@commands.Cog.listener()
diff --git a/cogs/serverManagement.py b/cogs/serverManagement.py
index ba842ab..12b1f36 100644
--- a/cogs/serverManagement.py
+++ b/cogs/serverManagement.py
@@ -3,7 +3,7 @@
from discord.ext import commands, tasks
from config.server import ServerConfig
-from helpers.supabaseClient import PostgresClient
+from shared_migrations.db.discord_bot import DiscordBotQueries
serverConfig = ServerConfig()
@@ -11,7 +11,7 @@
class ServerManagement(commands.Cog):
def __init__(self, bot):
self.bot: commands.Bot = bot
- self.postgres_client = PostgresClient()
+ self.postgres_client = DiscordBotQueries()
def validUser(self, ctx):
authorised_users = [
diff --git a/cogs/userInteractions.py b/cogs/userInteractions.py
index 270c0b6..e8ad0ac 100644
--- a/cogs/userInteractions.py
+++ b/cogs/userInteractions.py
@@ -5,7 +5,7 @@
from discord.ext import commands, tasks
from dotenv import find_dotenv, load_dotenv
-from helpers.supabaseClient import PostgresClient
+from shared_migrations.db.discord_bot import DiscordBotQueries
load_dotenv(find_dotenv())
@@ -16,7 +16,7 @@ class UserHandler(commands.Cog):
def __init__(self, bot) -> None:
self.bot = bot
self.update_contributors.start()
- self.postgres_client = PostgresClient()
+ self.postgres_client = DiscordBotQueries()
@tasks.loop(minutes=60)
async def update_contributors(self):
diff --git a/cogs/vcCog.py b/cogs/vcCog.py
index bfc6f90..969ae36 100644
--- a/cogs/vcCog.py
+++ b/cogs/vcCog.py
@@ -6,7 +6,7 @@
from discord.ext import commands
from config.server import ServerConfig
-from helpers.supabaseClient import PostgresClient
+from shared_migrations.db.discord_bot import DiscordBotQueries
"""
with io.BytesIO(image_bytes) as image_file:
@@ -21,7 +21,7 @@
class CommunityVCView(ui.View):
def __init__(self, *, timeout=None):
super().__init__(timeout=timeout)
- self.postgres_client = PostgresClient()
+ self.postgres_client = DiscordBotQueries()
def isCommunityContributor(self, roles: list[Role]):
CommunityContributorRoleID = ServerConfig.Roles.CONTRIBUTOR_ROLE
@@ -215,7 +215,7 @@ async def serveCertificateLink(self, interaction: Interaction, button: ui.Button
class VCProgramSelection(ui.View):
def __init__(self, *, timeout=None):
- self.postgres_client = PostgresClient()
+ self.postgres_client = DiscordBotQueries()
super().__init__(timeout=timeout)
async def resetSelectMenu(self, interaction):
diff --git a/helpers/supabaseClient.py b/helpers/supabaseClient.py
index f1e5925..5736a0f 100644
--- a/helpers/supabaseClient.py
+++ b/helpers/supabaseClient.py
@@ -6,7 +6,8 @@
from dotenv import load_dotenv
from sqlalchemy import create_engine,select,desc,update,delete
from sqlalchemy.orm import sessionmaker
-from models import *
+# from models import *
+from shared_migrations.db.models import *
from sqlalchemy.ext.declarative import DeclarativeMeta
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
diff --git a/main.py b/main.py
index 20da47b..16cc830 100644
--- a/main.py
+++ b/main.py
@@ -7,7 +7,7 @@
from discord.ext import commands
from cogs.vcCog import VCProgramSelection
-from helpers.supabaseClient import PostgresClient
+from shared_migrations.db.discord_bot import DiscordBotQueries
from dotenv import load_dotenv, find_dotenv
@@ -70,7 +70,7 @@ async def on_submit(self, interaction: discord.Interaction):
except Exception as e:
print('exception e ', e)
try:
- response = await PostgresClient().updateContributor(user_data)
+ response = await DiscordBotQueries().updateContributor(user_data)
print("DB updated for user:", user_data["discord_id"])
except Exception as e:
print("Failed to update credentials for user: "+e)
@@ -96,7 +96,7 @@ async def hasIntroduced():
while not authentication:
print("Not authenticated. Waiting")
await asyncio.sleep(15)
- authentication = await PostgresClient().read("contributors_registration", "discord_id", user.id)
+ authentication = await DiscordBotQueries().read("contributors_registration", "discord_id", user.id)
print("User has authenticated")
return True
diff --git a/shared_migrations/.gitignore b/shared_migrations/.gitignore
new file mode 100644
index 0000000..2eea525
--- /dev/null
+++ b/shared_migrations/.gitignore
@@ -0,0 +1 @@
+.env
\ No newline at end of file
diff --git a/shared_migrations/.vscode/settings.json b/shared_migrations/.vscode/settings.json
new file mode 100644
index 0000000..acd8ec1
--- /dev/null
+++ b/shared_migrations/.vscode/settings.json
@@ -0,0 +1,5 @@
+{
+ "python.analysis.extraPaths": [
+ "./db"
+ ]
+}
\ No newline at end of file
diff --git a/shared_migrations/README.md b/shared_migrations/README.md
new file mode 100644
index 0000000..88b2a7e
Binary files /dev/null and b/shared_migrations/README.md differ
diff --git a/shared_migrations/alembic.ini b/shared_migrations/alembic.ini
new file mode 100644
index 0000000..8dae057
--- /dev/null
+++ b/shared_migrations/alembic.ini
@@ -0,0 +1,118 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+# Use forward slashes (/) also on windows to provide an os agnostic path
+script_location = migrations
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python>=3.9 or backports.zoneinfo library.
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to migrations/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+# version_path_separator = newline
+version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
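+# DATABASE_URL is not defined in this file; it is expected to be supplied at
+# runtime (e.g. injected into the alembic config section from the environment
+# by migrations/env.py).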
+sqlalchemy.url = %(DATABASE_URL)s
+
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARNING
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARNING
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/shared_migrations/db/__init__.py b/shared_migrations/db/__init__.py
new file mode 100644
index 0000000..52d6df0
--- /dev/null
+++ b/shared_migrations/db/__init__.py
@@ -0,0 +1,33 @@
+import os
+
+# from discord import Member
+from dotenv import load_dotenv
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.pool import NullPool
+
+load_dotenv(".env")
+
+
+def get_postgres_uri():
+ DB_HOST = os.getenv('POSTGRES_DB_HOST')
+ DB_NAME = os.getenv('POSTGRES_DB_NAME')
+ DB_USER = os.getenv('POSTGRES_DB_USER')
+ DB_PASS = os.getenv('POSTGRES_DB_PASS')
+
+ # DB_URL = os.getenv('DATABASE_URL')
+ # print('db')
+ return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}'
+
+
+class PostgresORM:
+
+ def __init__(self):
+ DATABASE_URL = get_postgres_uri()
+ # Initialize Async SQLAlchemy
+ engine = create_async_engine(DATABASE_URL, echo=False,poolclass=NullPool)
+ async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession)
+ self.session = async_session
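+ # NOTE: self.session is an async session *factory* (sessionmaker), not a live
+ # session; open one per unit of work, e.g. (illustrative sketch):
+ # async with PostgresORM.get_instance().session() as session:
+ # result = await session.execute(...)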
+
+ @staticmethod
+ def get_instance():
+ return PostgresORM()
\ No newline at end of file
diff --git a/shared_migrations/db/__pycache__/__init__.cpython-310.pyc b/shared_migrations/db/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..d65e3e5
Binary files /dev/null and b/shared_migrations/db/__pycache__/__init__.cpython-310.pyc differ
diff --git a/shared_migrations/db/__pycache__/models.cpython-310.pyc b/shared_migrations/db/__pycache__/models.cpython-310.pyc
new file mode 100644
index 0000000..860a0bb
Binary files /dev/null and b/shared_migrations/db/__pycache__/models.cpython-310.pyc differ
diff --git a/shared_migrations/db/discord_bot.py b/shared_migrations/db/discord_bot.py
new file mode 100644
index 0000000..e62dd8b
--- /dev/null
+++ b/shared_migrations/db/discord_bot.py
@@ -0,0 +1,302 @@
+import os
+
+from discord import Member
+from dotenv import load_dotenv
+from sqlalchemy import create_engine,select,desc,update,delete
+from sqlalchemy.orm import sessionmaker
+from .models import *
+from sqlalchemy.ext.declarative import DeclarativeMeta
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
+
+# load_dotenv()
+
+class DiscordBotQueries:
+ def __init__(self):
+ DB_HOST = os.getenv('POSTGRES_DB_HOST')
+ DB_NAME = os.getenv('POSTGRES_DB_NAME')
+ DB_USER = os.getenv('POSTGRES_DB_USER')
+ DB_PASS = os.getenv('POSTGRES_DB_PASS')
+
+ engine = create_async_engine(f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}')
+ async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession)
+ self.session = async_session
+
+ @staticmethod
+ def get_instance():
+ return DiscordBotQueries()
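+ # Illustrative usage from a cog (sketch; assumes the env vars above are set):
+ # client = DiscordBotQueries()
+ # rows = await client.read_all("contributors_registration")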
+
+ def convert_dict(self, data):
+ try:
+ if isinstance(data, list):
+ return [val.to_dict() for val in data]
+ return [data.to_dict()]
+ except Exception as e:
+ print(e)
+ raise
+
+ def getStatsStorage(self, fileName):
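+ # NOTE: still depends on the legacy Supabase client; self.client is never initialized in this class.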
+ return self.client.storage.from_("c4gt-github-profile").download(fileName)
+
+
+ def logVCAction(self,user, action):
+ try:
+ new_log = VcLogs(discord_id=user.id, discord_name=user.name, option=action)
+ self.session.add(new_log)
+ self.session.commit()
+ return self.convert_dict(new_log)
+ except Exception as e:
+ self.session.rollback()
+ print("Error logging VC action:", e)
+ return None
+
+ def getLeaderboard(self, id: int):
+ data = self.session.query(Leaderboard).where(Leaderboard.discord_id == id).all()
+ return self.convert_dict(data)
+
+
+ def read(self, table_class, query_key, query_value, columns=None):
+ try:
+ # Callers (e.g. main.py) may pass a table name string instead of a mapped class
+ if isinstance(table_class, str):
+ table_class = self.get_class_by_tablename(table_class)
+ stmt = select(table_class)
+ stmt = stmt.where(getattr(table_class, query_key) == query_value)
+
+ if columns:
+ stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns))
+ result = self.session.execute(stmt)
+ rows = result.fetchall()
+ column_names = [col.name for col in stmt.selected_columns]
+ data = [dict(zip(column_names, row)) for row in rows]
+ return data
+
+ result = self.session.execute(stmt)
+ return self.convert_dict(result.scalars().all())
+
+ except Exception as e:
+ print(f"Error reading data from table '{table_class}':", e)
+ return None
+
+ def get_class_by_tablename(self,tablename):
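+ # Maps a table name back to its mapped class by scanning SQLAlchemy's (private) class registry.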
+ try:
+ for cls in Base.registry._class_registry.values():
+ if isinstance(cls, DeclarativeMeta):
+ if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename:
+ return cls
+ return None
+ except Exception as e:
+ print(f"ERROR get_class_by_tablename - {e}")
+ return None
+
+ def read_by_order_limit(self, table_class, query_key, query_value, order_column, order_by=False, limit=1, columns="*"):
+ try:
+ stmt = select(table_class)
+ stmt = stmt.where(getattr(table_class, query_key) == query_value)
+ if order_by:
+ stmt = stmt.order_by(desc(getattr(table_class, order_column)))
+ else:
+ stmt = stmt.order_by(getattr(table_class, order_column))
+
+ stmt = stmt.limit(limit)
+ if columns != "*":
+ stmt = stmt.with_only_columns(*(getattr(table_class, col) for col in columns))
+
+ result = self.session.execute(stmt)
+ results = result.fetchall()
+
+ # Convert results to list of dictionaries
+ column_names = list(result.keys())
+ data = [dict(zip(column_names, row)) for row in results]
+
+ return data
+
+ except Exception as e:
+ print("Error reading data:", e)
+ return None
+
+ async def read_all(self,table_class):
+ try:
+ table = self.get_class_by_tablename(table_class)
+ # Query all records from the specified table class
+ async with self.session() as session:
+ stmt = select(table)
+ result = await session.execute(stmt)
+
+ data = result.scalars().all()
+ result = self.convert_dict(data)
+ return result
+ except Exception as e:
+ print(f"An error occurred -read_all_from_table : {e}")
+ return None
+
+ def update(self, table_class, update_data, query_key, query_value):
+ try:
+ stmt = (
+ update(table_class)
+ .where(getattr(table_class, query_key) == query_value)
+ .values(update_data)
+ .returning(*[getattr(table_class, col) for col in update_data.keys()]) # Return updated columns
+ )
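+ # UPDATE ... RETURNING (supported by PostgreSQL) sends the updated columns back in the same round trip, so no re-select is needed.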
+
+ result = self.session.execute(stmt)
+ self.session.commit()
+ updated_record = result.fetchone()
+
+ if updated_record:
+ updated_record_dict = dict(zip(result.keys(), updated_record))
+ return updated_record_dict
+ else:
+ return None
+ except Exception as e:
+ print("Error updating record:", e)
+ return None
+
+
+ def insert(self, table, data):
+ try:
+ new_record = table(**data)
+ self.session.add(new_record)
+ self.session.commit()
+ return new_record.to_dict()
+ except Exception as e:
+ print("Error inserting data:", e)
+ self.session.rollback() # Rollback in case of error
+ return None
+
+
+ def memberIsAuthenticated(self, member: Member):
+ data = self.session.query(ContributorsRegistration).where(ContributorsRegistration.discord_id == member.id).all()
+ return bool(data)
+
+ def addChapter(self, roleId: int, orgName: str, type: str):
+ try:
+ existing_record = self.session.query(Chapters).filter_by(discord_role_id=roleId).first()
+
+ if existing_record:
+ existing_record.type = type
+ existing_record.org_name = orgName
+ else:
+ new_record = Chapters(discord_role_id=roleId, type=type, org_name=orgName)
+ self.session.add(new_record)
+
+ self.session.commit()
+ return existing_record.to_dict() if existing_record else new_record.to_dict()
+ except Exception as e:
+ print("Error adding or updating chapter:", e)
+ return None
+
+
+ def deleteChapter(self,roleId: int):
+ try:
+ # Build the delete statement
+ stmt = delete(Chapters).where(Chapters.discord_role_id == roleId)
+ result = self.session.execute(stmt)
+ self.session.commit()
+ return True if result.rowcount else False
+ except Exception as e:
+ print("Error deleting chapter:", e)
+ return None
+
+ async def updateContributor(self, contributor: Member, table_class=None):
+ try:
+ async with self.session() as session:
+ if table_class is None:
+ table_class = ContributorsDiscord
+ roles = lookForRoles(contributor.roles)
+ chapters = roles["chapter_roles"]
+ gender = roles["gender"]
+
+ # Prepare the data to be upserted
+ update_data = {
+ "discord_id": contributor.id,
+ "discord_username": contributor.name,
+ "chapter": chapters[0] if chapters else None,
+ "gender": gender,
+ "joined_at": contributor.joined_at,
+ }
+
+ stmt = select(ContributorsDiscord).where(ContributorsDiscord.discord_id == contributor.id)
+ result = await session.execute(stmt)
+ existing_record = result.scalars().first()
+
+ if existing_record:
+ stmt = (
+ update(table_class)
+ .where(table_class.discord_id == contributor.id)
+ .values(update_data)
+ )
+ await session.execute(stmt)
+ else:
+ new_record = table_class(**update_data)
+ session.add(new_record)
+
+ # Commit the transaction
+ await session.commit()
+ return True
+ except Exception as e:
+ print("Error updating contributor:", e)
+ return False
+
+
+ def updateContributors(self, contributors: list[Member], table_class):
+ try:
+ for contributor in contributors:
+ chapters = lookForRoles(contributor.roles)["chapter_roles"]
+ gender = lookForRoles(contributor.roles)["gender"]
+ update_data = {
+ "discord_id": contributor.id,
+ "discord_username": contributor.name,
+ "chapter": chapters[0] if chapters else None,
+ "gender": gender,
+ "joined_at": contributor.joined_at,
+ }
+ existing_record = self.session.query(table_class).filter_by(discord_id=contributor.id).first()
+
+ if existing_record:
+ stmt = (
+ update(table_class)
+ .where(table_class.discord_id == contributor.id)
+ .values(update_data)
+ )
+ self.session.execute(stmt)
+ else:
+ new_record = table_class(**update_data)
+ self.session.add(new_record)
+
+ self.session.commit()
+ return True
+ except Exception as e:
+ print("Error updating contributors:", e)
+ return False
+
+
+ def deleteContributorDiscord(self, contributorDiscordIds, table_class=None):
+ try:
+ if table_class is None:
+ table_class = ContributorsDiscord
+ stmt = delete(table_class).where(table_class.discord_id.in_(contributorDiscordIds))
+ self.session.execute(stmt)
+ self.session.commit()
+
+ return True
+ except Exception as e:
+ print("Error deleting contributors:", e)
+ self.session.rollback()
+ return False
+
+
+
+ def read_all_active(self, table):
+ if table == "contributors_discord":
+ table = ContributorsDiscord
+ data = self.session.query(table).where(table.is_active == True).all()
+ return self.convert_dict(data)
+
+ def invalidateContributorDiscord(self, contributorDiscordIds):
+ # Ported from the old Supabase call to SQLAlchemy, matching the rest of this class
+ stmt = (
+ update(ContributorsDiscord)
+ .where(ContributorsDiscord.discord_id.in_(contributorDiscordIds))
+ .values(is_active=False)
+ )
+ self.session.execute(stmt)
+ self.session.commit()
diff --git a/shared_migrations/db/dmp_api.py b/shared_migrations/db/dmp_api.py
new file mode 100644
index 0000000..84f5a82
--- /dev/null
+++ b/shared_migrations/db/dmp_api.py
@@ -0,0 +1,107 @@
+from .models import *
+from sqlalchemy import func
+import os
+from dotenv import load_dotenv
+# from flask_sqlalchemy import SQLAlchemy
+import sqlalchemy
+from sqlalchemy.future import select
+
+
+class DmpAPIQueries:
+
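+ # These helpers are stateless; each takes an async session factory and opens a short-lived session per call.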
+ @staticmethod
+ async def get_issue_query(async_session):
+ try:
+ async with async_session() as session:
+ results = await session.execute(
+ select(
+ DmpOrgs.id.label('org_id'),
+ DmpOrgs.name.label('org_name'),
+ func.json_agg(
+ func.json_build_object(
+ 'id', DmpIssues.id,
+ 'name', DmpIssues.title
+ )
+ ).label('issues')
+ )
+ .outerjoin(DmpIssues, DmpOrgs.id == DmpIssues.org_id)
+ .group_by(DmpOrgs.id)
+ .order_by(DmpOrgs.id)
+ )
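+ # Yields one row per org: (org_id, org_name, issues=[{"id": ..., "name": ...}, ...]),
+ # aggregated in-database via json_agg/json_build_object.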
+
+ # Extract results as a list of dictionaries if needed
+ data = results.all()
+
+ return data
+ except Exception as e:
+ print(f"An error occurred: get_column_value {e}")
+ return None
+
+
+ @staticmethod
+ async def get_issue_owner(async_session, name):
+ try:
+ async with async_session() as session:
+ response = await session.execute(
+ select(DmpOrgs).filter_by(name=name)
+ )
+ results = response.scalars().all()
+ return results
+ except Exception as e:
+ print(f"An error occurred: get_column_value {e}")
+ return None
+
+ @staticmethod
+ async def get_actual_owner_query(async_session, owner):
+ try:
+ async with async_session() as session:
+ response = await session.execute(
+ select(DmpIssues).filter(DmpIssues.repo_owner.like(f'%{owner}%'))
+ )
+ results = response.scalars().all() # Fetch all matching rows as objects
+ results = [val.to_dict() for val in results] # Convert objects to dicts
+ return results
+ except Exception as e:
+ print(f"An error occurred: get_column_value {e}")
+ return None
+
+
+ @staticmethod
+ async def get_dmp_issues(async_session, issue_id):
+ try:
+ async with async_session() as session:
+ response = await session.execute(
+ select(DmpIssues).filter_by(id=int(issue_id))
+ )
+ results = response.scalars().all() # Fetch all matching rows as objects
+ results = [val.to_dict() for val in results] # Convert objects to dicts
+ return results
+ except Exception as e:
+ print(f"An error occurred: get_column_value {e}")
+ return None
+
+
+ @staticmethod
+ async def get_dmp_issue_updates(async_session, dmp_issue_id):
+ try:
+ async with async_session() as session:
+ response = await session.execute(
+ select(DmpIssueUpdates).filter_by(dmp_id=int(dmp_issue_id))
+ )
+ results = response.scalars().all() # Fetch all matching rows as objects
+ results = [val.to_dict() for val in results] # Convert objects to dicts
+ return results
+ except Exception as e:
+ print(f"An error occurred: get_column_value {e}")
+ return None
+
+
+ @staticmethod
+ async def get_pr_data(async_session, dmp_issue_id):
+ try:
+ async with async_session() as session:
+ response = await session.execute(
+ select(DmpPrUpdates).filter_by(dmp_id=int(dmp_issue_id))
+ )
+ pr_updates = response.scalars().all() # Fetch all matching rows as objects
+ pr_updates_dict = [pr_update.to_dict() for pr_update in pr_updates] # Convert objects to dicts
+ return pr_updates_dict
+ except Exception as e:
+ print(f"An error occurred: get_column_value {e}")
+ return None
+
+
\ No newline at end of file
diff --git a/shared_migrations/db/dmp_cron.py b/shared_migrations/db/dmp_cron.py
new file mode 100644
index 0000000..8dd0135
--- /dev/null
+++ b/shared_migrations/db/dmp_cron.py
@@ -0,0 +1,246 @@
+from sqlalchemy.future import select
+from .models import *
+from sqlalchemy import update, func
+# from app import async_session
+from sqlalchemy.dialects.postgresql import insert
+from datetime import datetime
+from sqlalchemy.orm import aliased
+import os
+from sqlalchemy.orm import Session
+from sqlalchemy.exc import NoResultFound
+
+
+class DmpCronQueries:
+
+ # def get_postgres_uri():
+ # DB_HOST = os.getenv('POSTGRES_DB_HOST')
+ # DB_NAME = os.getenv('POSTGRES_DB_NAME')
+ # DB_USER = os.getenv('POSTGRES_DB_USER')
+ # DB_PASS = os.getenv('POSTGRES_DB_PASS')
+
+ # return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}'
+
+ @staticmethod
+ async def get_timestamp(async_session, model, col_name: str, col: str, value):
+ try:
+ # Construct the ORM query
+ query = select(getattr(model, col_name)).filter(getattr(model, col) == value)
+
+ # Execute the query and fetch the result
+ async with async_session() as session:
+ result = await session.execute(query)
+ return result.scalar()
+
+ except NoResultFound:
+ return None
+ except Exception as e:
+ print(f"An error occurred: get_column_value {e}")
+ return None
+
+ @staticmethod
+ async def get_all_dmp_issues(async_session):
+ try:
+ async with async_session() as session:
+ # Alias for the DmpOrg table to use in the JSON_BUILD_OBJECT
+ dmp_org_alias = aliased(DmpOrgs)
+
+ # Build the query
+ query = (
+ select(
+ DmpIssues,
+ func.json_build_object(
+ 'created_at', dmp_org_alias.created_at,
+ 'description', dmp_org_alias.description,
+ 'id', dmp_org_alias.id,
+ 'link', dmp_org_alias.link,
+ 'name', dmp_org_alias.name,
+ 'repo_owner', dmp_org_alias.repo_owner
+ ).label('dmp_orgs')
+ )
+ .outerjoin(dmp_org_alias, DmpIssues.org_id == dmp_org_alias.id)
+ .filter(DmpIssues.org_id.isnot(None))
+ .order_by(DmpIssues.id)
+ )
+
+ # Execute the query and fetch results
+ result = await session.execute(query)
+ rows = result.fetchall()
+
+ # Convert results to dictionaries
+ data = []
+ for row in rows:
+ issue_dict = row._asdict() # Convert row to dict
+ dmp_orgs = issue_dict.pop('dmp_orgs') # Extract JSON object from row
+ issue_dict['dmp_orgs'] = dmp_orgs
+ issue_dict.update(issue_dict['DmpIssues'].to_dict())
+ # Add JSON object back to dict
+ del issue_dict['DmpIssues']
+ data.append(issue_dict)
+
+ return data
+
+ except Exception as e:
+ print(e)
+ raise
+
+ @staticmethod
+ async def update_dmp_issue(async_session, issue_id: int, update_data: dict):
+ try:
+ async with async_session() as session:
+ async with session.begin():
+ # Build the update query
+ query = (
+ update(DmpIssues)
+ .where(DmpIssues.id == issue_id)
+ .values(**update_data)
+ )
+
+ # Execute the query
+ await session.execute(query)
+ await session.commit()
+ return True
+
+ except Exception as e:
+ print(e)
+ return False
+
+
+ @staticmethod
+ async def upsert_data_orm(async_session, update_data):
+ try:
+
+ async with async_session() as session:
+ async with session.begin():
+
+ # Define the insert statement
+ stmt = insert(DmpIssueUpdates).values(**update_data)
+
+ # Define the update statement in case of conflict
+ stmt = stmt.on_conflict_do_update(
+ index_elements=['comment_id'],
+ set_={
+ 'body_text': stmt.excluded.body_text,
+ 'comment_link': stmt.excluded.comment_link,
+ 'comment_api': stmt.excluded.comment_api,
+ 'comment_updated_at': stmt.excluded.comment_updated_at,
+ 'dmp_id': stmt.excluded.dmp_id,
+ 'created_by': stmt.excluded.created_by,
+ 'created_at': stmt.excluded.created_at
+ }
+ )
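+ # ON CONFLICT targets comment_id, the primary key of dmp_issue_updates, turning the INSERT into an upsert.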
+
+ # Execute the statement
+ await session.execute(stmt)
+ await session.commit()
+
+ return True
+
+ except Exception as e:
+ print(e)
+ return False
+
+
+
+ @staticmethod
+ async def upsert_pr_update(async_session, pr_update_data):
+ try:
+ async with async_session() as session:
+ async with session.begin():
+ pr_update_data['pr_updated_at'] = datetime.fromisoformat(pr_update_data['pr_updated_at']).replace(tzinfo=None) if pr_update_data['pr_updated_at'] else None
+ pr_update_data['merged_at'] = datetime.fromisoformat(pr_update_data['merged_at']).replace(tzinfo=None) if pr_update_data['merged_at'] else None
+ pr_update_data['closed_at'] = datetime.fromisoformat(pr_update_data['closed_at']).replace(tzinfo=None) if pr_update_data['closed_at'] else None
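+ # Incoming timestamps are ISO-8601 strings; tzinfo is stripped because the model's DateTime columns are timezone-naive.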
+
+ # Prepare the insert statement
+ stmt = insert(DmpPrUpdates).values(**pr_update_data)
+
+ # Prepare the conflict resolution strategy
+ stmt = stmt.on_conflict_do_update(
+ index_elements=['pr_id'], # Assuming `pr_id` is the unique key
+ set_={
+ 'status': stmt.excluded.status,
+ 'merged_at': stmt.excluded.merged_at,
+ 'closed_at': stmt.excluded.closed_at,
+ 'pr_updated_at': stmt.excluded.pr_updated_at,
+ 'dmp_id': stmt.excluded.dmp_id,
+ 'created_at': stmt.excluded.created_at,
+ 'title': stmt.excluded.title,
+ 'link': stmt.excluded.link
+ }
+ )
+ # Execute and commit the transaction
+ await session.execute(stmt)
+ await session.commit()
+
+ return True
+
+ except Exception as e:
+ print(e)
+ return False
+
+
+
+ @staticmethod
+ async def update_dmp_week_update(async_session, update_data):
+ try:
+ async with async_session() as session:
+ async with session.begin():
+ # Define the filter conditions
+ stmt = (
+ select(DmpWeekUpdates)
+ .where(
+ DmpWeekUpdates.week == update_data['week'],
+ DmpWeekUpdates.dmp_id == update_data['dmp_id']
+ )
+ )
+
+ # Fetch the row that needs to be updated
+ result = await session.execute(stmt)
+ dmp_week_update = result.scalars().first()
+
+ if dmp_week_update:
+ # Update the fields with the values from update_data
+ for key, value in update_data.items():
+ setattr(dmp_week_update, key, value)
+
+ # Commit the changes
+ await session.commit()
+ return True
+ except Exception as e:
+ print(e)
+ return False
+
+
+
+ @staticmethod
+ async def get_week_updates(async_session, dmp_id, week):
+ try:
+ async with async_session() as session:
+ # Build the ORM query
+ stmt = select(DmpWeekUpdates).where(
+ DmpWeekUpdates.dmp_id == dmp_id,
+ DmpWeekUpdates.week == week
+ )
+ # Execute the query
+ result = await session.execute(stmt)
+
+ # Fetch all matching rows
+ week_updates = result.scalars().all()
+
+
+ return len(week_updates) > 0
+
+ except Exception as e:
+ print(e)
+ return False
+
+
+
+ @staticmethod
+ async def insert_dmp_week_update(async_session, update_data):
+ try:
+ async with async_session() as session:
+ async with session.begin():
+ # Define the insert statement
+ stmt = insert(DmpWeekUpdates).values(**update_data)
+
+ # Execute the statement
+ await session.execute(stmt)
+ await session.commit()
+
+ return True
+
+ except Exception as e:
+ print(e)
+ return False
+
+
diff --git a/shared_migrations/db/models.py b/shared_migrations/db/models.py
new file mode 100644
index 0000000..bd05abe
--- /dev/null
+++ b/shared_migrations/db/models.py
@@ -0,0 +1,1586 @@
+from datetime import datetime
+from sqlalchemy.orm import relationship
+from sqlalchemy import UUID, Boolean, Float, MetaData, Column, Integer, SmallInteger, String, Text, ForeignKey, BigInteger, UniqueConstraint, func
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.types import TypeDecorator, DateTime as SA_DateTime
+
+Base = declarative_base()
+# Shared metadata object
+shared_metadata = MetaData()
+
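+# Coercing DateTime: deliberately shadows sqlalchemy's DateTime within this module so
+# that every column declared as DateTime transparently accepts ISO-8601 strings.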
+class DateTime(TypeDecorator):
+ impl = SA_DateTime
+
+ def process_bind_param(self, value, dialect):
+ if isinstance(value, str):
+ try:
+ # Convert string to datetime
+ return datetime.fromisoformat(value)
+ except ValueError:
+ # If conversion fails, return None
+ return None
+ return value
+
+ def process_result_value(self, value, dialect):
+ return value
+
+
+class AppComments(Base):
+ __tablename__ = 'app_comments'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ updated_at = Column(DateTime, nullable=True)
+ api_url = Column(Text, nullable=True)
+ comment_id = Column(BigInteger, nullable=True)
+ issue_id = Column(BigInteger, unique=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': str(self.id),
+ 'updated_at': self.updated_at,
+ 'api_url': self.api_url,
+ 'comment_id': self.comment_id,
+ 'issue_id': self.issue_id
+ }
+
+class Badges(Base):
+ __tablename__ = 'badges'
+ id = Column(UUID(as_uuid=True), primary_key=True)
+ image = Column(Text, nullable=True)
+ text = Column(Text, nullable=True)
+ description = Column(Text, nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+
+ user_badges = relationship('UserBadges', back_populates='badge')
+
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'image': self.image,
+ 'text': self.text,
+ 'description': self.description,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at
+ }
+
+class CcbpTickets(Base):
+ __tablename__ = 'ccbp_tickets'
+ __table_args__ = {'comment': 'A table to store details of CCBP Tickets from various projects'}
+
+ created_at = Column(DateTime, nullable=True)
+ name = Column(Text, nullable=True)
+ product = Column(Text, nullable=True)
+ complexity = Column(Text, nullable=True)
+ project_category = Column(Text, nullable=True)
+ project_sub_category = Column(Text, nullable=True)
+ reqd_skills = Column(Text, nullable=True)
+ issue_id = Column(BigInteger, unique=True)
+ api_endpoint_url = Column(Text, unique=True, nullable=True)
+ url = Column(Text, unique=True, nullable=True)
+ ticket_points = Column(SmallInteger, nullable=True, comment='How many points the ticket is worth')
+ index = Column(SmallInteger, unique=True, autoincrement=True)
+ mentors = Column(Text, nullable=True)
+ uuid = Column(UUID(as_uuid=True), primary_key=True)
+ status = Column(Text, nullable=True)
+ community_label = Column(Boolean, nullable=True, comment='has community label')
+ organization = Column(Text, nullable=True)
+ closed_at = Column(DateTime, nullable=True, comment='date-time at which issue was closed')
+ assignees = Column(Text, nullable=True)
+ issue_author = Column(Text, nullable=True)
+ is_assigned = Column(Boolean, nullable=False)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'created_at': self.created_at,
+ 'name': self.name,
+ 'product': self.product,
+ 'complexity': self.complexity,
+ 'project_category': self.project_category,
+ 'project_sub_category': self.project_sub_category,
+ 'reqd_skills': self.reqd_skills,
+ 'issue_id': self.issue_id,
+ 'api_endpoint_url': self.api_endpoint_url,
+ 'url': self.url,
+ 'ticket_points': self.ticket_points,
+ 'index': self.index,
+ 'mentors': self.mentors,
+ 'uuid': str(self.uuid),
+ 'status': self.status,
+ 'community_label': self.community_label,
+ 'organization': self.organization,
+ 'closed_at': self.closed_at,
+ 'assignees': self.assignees,
+ 'issue_author': self.issue_author,
+ 'is_assigned': self.is_assigned
+ }
+
+class Chapters(Base):
+ __tablename__ = 'chapters'
+
+ id = Column(UUID(as_uuid=True), primary_key=True)
+ type = Column(Text, nullable=True)
+ org_name = Column(Text, unique=True)
+ primary_organisation = Column(Text, nullable=True, comment='the organisation that the chapter is mapped to')
+ sessions = Column(Integer, nullable=True)
+ discord_role_id = Column(BigInteger, unique=True, comment='db id of the corresponding member role in discord server')
+ created_at = Column(DateTime, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'type': self.type,
+ 'org_name': self.org_name,
+ 'primary_organisation': self.primary_organisation,
+ 'sessions': self.sessions,
+ 'discord_role_id': self.discord_role_id,
+ 'created_at': self.created_at
+ }
+
+
+
+class ConnectedPrs(Base):
+ __tablename__ = 'connected_prs'
+
+ id = Column(UUID(as_uuid=True), primary_key=True)
+ created_at = Column(DateTime, nullable=True)
+ api_url = Column(Text, nullable=True)
+ html_url = Column(Text, unique=True, nullable=True)
+ raised_by = Column(BigInteger, nullable=True)
+ raised_at = Column(DateTime, nullable=False)
+ raised_by_username = Column(Text, nullable=False)
+ status = Column(Text, nullable=True)
+ is_merged = Column(Boolean, nullable=True)
+ merged_by = Column(BigInteger, nullable=True)
+ merged_at = Column(Text, nullable=True)
+ merged_by_username = Column(Text, nullable=True)
+ pr_id = Column(BigInteger, nullable=False, comment='github id of the pr')
+ points = Column(SmallInteger, nullable=False)
+ ticket_url = Column(Text, nullable=False)
+ ticket_complexity = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'created_at': self.created_at,
+ 'api_url': self.api_url,
+ 'html_url': self.html_url,
+ 'raised_by': self.raised_by,
+ 'raised_at': self.raised_at,
+ 'raised_by_username': self.raised_by_username,
+ 'status': self.status,
+ 'is_merged': self.is_merged,
+ 'merged_by': self.merged_by,
+ 'merged_at': self.merged_at,
+ 'merged_by_username': self.merged_by_username,
+ 'pr_id': self.pr_id,
+ 'points': self.points,
+ 'ticket_url': self.ticket_url,
+ 'ticket_complexity': self.ticket_complexity
+ }
+
+class ContributorNames(Base):
+ __tablename__ = 'contributor_names'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ discord_id = Column(BigInteger, nullable=False)
+ name = Column(Text, nullable=True)
+ country = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'discord_id': self.discord_id,
+ 'name': self.name,
+ 'country': self.country
+ }
+
+class ContributorsDiscord(Base):
+ __tablename__ = 'contributors_discord'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ discord_id = Column(BigInteger, unique=True, nullable=False)
+ github_id = Column(BigInteger, nullable=True)
+ github_url = Column(String, nullable=True)
+ discord_username = Column(String, nullable=True)
+ joined_at = Column(DateTime, nullable=False)
+ email = Column(Text, nullable=True)
+ field_name = Column(Text, nullable=True, name='name') # Adjusted field name
+ chapter = Column(Text, nullable=True, comment="the chapter they're associated with")
+ gender = Column(Text, nullable=True)
+ is_active = Column(Boolean, nullable=False)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'discord_id': self.discord_id,
+ 'github_id': self.github_id,
+ 'github_url': self.github_url,
+ 'discord_username': self.discord_username,
+ 'joined_at': self.joined_at,
+ 'email': self.email,
+ 'name': self.field_name,
+ 'chapter': self.chapter,
+ 'gender': self.gender,
+ 'is_active': self.is_active
+ }
+
+class ContributorsRegistration(Base):
+ __tablename__ = 'contributors_registration'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ discord_id = Column(BigInteger, unique=True, nullable=False)
+ github_id = Column(BigInteger, unique=True, nullable=False)
+ github_url = Column(String, nullable=False)
+ discord_username = Column(String, nullable=True)
+ joined_at = Column(DateTime, nullable=False)
+ email = Column(Text, nullable=True)
+ name = Column(Text, nullable=True)
+
+ point_transactions = relationship('PointTransactions', back_populates='contributor')
+
+ user_activities = relationship('UserActivity', back_populates='contributor')
+ user_points_mappings = relationship('UserPointsMapping', back_populates='contributors')
+
+
+ def __repr__(self):
+ return f""
+
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'discord_id': self.discord_id,
+ 'github_id': self.github_id,
+ 'github_url': self.github_url,
+ 'discord_username': self.discord_username,
+ 'joined_at': self.joined_at,
+ 'email': self.email,
+ 'name': self.name
+ }
+
+class DiscordChannels(Base):
+ __tablename__ = 'discord_channels'
+
+ channel_id = Column(BigInteger, primary_key=True)
+ channel_name = Column(Text, nullable=True)
+ webhook = Column(Text, nullable=True)
+ should_notify = Column(Boolean, nullable=False)
+
+ products = relationship('Product', back_populates='channel')
+
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'channel_id': self.channel_id,
+ 'channel_name': self.channel_name,
+ 'webhook': self.webhook,
+ 'should_notify': self.should_notify
+ }
+
+class DiscordEngagement(Base):
+ __tablename__ = 'discord_engagement'
+ __table_args__ = {'comment': 'engagement metrics for contributors'}
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ created_at = Column(DateTime, nullable=True)
+ contributor = Column(BigInteger, unique=True, nullable=False)
+ has_introduced = Column(Boolean, nullable=True)
+ total_message_count = Column(BigInteger, nullable=True)
+ total_reaction_count = Column(BigInteger, nullable=True)
+ converserbadge = Column(Boolean, nullable=True)
+ apprenticebadge = Column(Boolean, nullable=True)
+ rockstarbadge = Column(Boolean, nullable=True)
+ enthusiastbadge = Column(Boolean, nullable=True)
+ risingstarbadge = Column(Boolean, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'created_at': self.created_at,
+ 'contributor': self.contributor,
+ 'has_introduced': self.has_introduced,
+ 'total_message_count': self.total_message_count,
+ 'total_reaction_count': self.total_reaction_count,
+ 'converserbadge': self.converserbadge,
+ 'apprenticebadge': self.apprenticebadge,
+ 'rockstarbadge': self.rockstarbadge,
+ 'enthusiastbadge': self.enthusiastbadge,
+ 'risingstarbadge': self.risingstarbadge
+ }
+
+class DmpIssueUpdates(Base):
+ __tablename__ = 'dmp_issue_updates'
+ __table_args__ = {'comment': 'Having records of dmp with issue details'}
+
+ created_at = Column(DateTime, nullable=False)
+ body_text = Column(Text, nullable=True)
+ comment_link = Column(Text, nullable=True)
+ comment_id = Column(BigInteger, primary_key=True)
+ comment_api = Column(String, nullable=True)
+ comment_updated_at = Column(DateTime, nullable=True)
+ dmp_id = Column(BigInteger, ForeignKey('dmp_issues.id'), nullable=False)
+ created_by = Column(Text, nullable=False)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'created_at': self.created_at,
+ 'body_text': self.body_text,
+ 'comment_link': self.comment_link,
+ 'comment_id': self.comment_id,
+ 'comment_api': self.comment_api,
+ 'comment_updated_at': self.comment_updated_at,
+ 'dmp_id': self.dmp_id,
+ 'created_by': self.created_by
+ }
+
+
+class DmpIssues(Base):
+ __tablename__ = 'dmp_issues'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ issue_url = Column(String, nullable=False)
+ issue_number = Column(BigInteger, nullable=False)
+ mentor_username = Column(Text, nullable=True)
+ contributor_username = Column(Text, nullable=True)
+ title = Column(Text, nullable=False)
+ org_id = Column(BigInteger, ForeignKey('dmp_orgs.id'), nullable=False)
+ description = Column(Text, nullable=False)
+ repo = Column(Text, nullable=False)
+ repo_owner = Column(Text, nullable=False)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'issue_url': self.issue_url,
+ 'issue_number': self.issue_number,
+ 'mentor_username': self.mentor_username,
+ 'contributor_username': self.contributor_username,
+ 'title': self.title,
+ 'org_id': self.org_id,
+ 'description': self.description,
+ 'repo': self.repo,
+ 'repo_owner': self.repo_owner
+ }
+
+class DmpOrgs(Base):
+ __tablename__ = 'dmp_orgs'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ created_at = Column(DateTime, nullable=False)
+ name = Column(Text, nullable=False)
+ description = Column(Text, nullable=False)
+ link = Column(Text, nullable=False)
+ repo_owner = Column(Text, nullable=False)
+
+ # issues = relationship('Issues', backref='organization', lazy='joined')
+
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'created_at': self.created_at,
+ 'name': self.name,
+ 'description': self.description,
+ 'link': self.link,
+ 'repo_owner': self.repo_owner
+ }
+
+class DmpPrUpdates(Base):
+ __tablename__ = 'dmp_pr_updates'
+ __table_args__ = {'comment': 'Having PR related records'}
+
+ created_at = Column(DateTime, nullable=False)
+ pr_id = Column(BigInteger, primary_key=True)
+ status = Column(String, nullable=False)
+ title = Column(Text, nullable=False)
+ pr_updated_at = Column(DateTime, nullable=True)
+ merged_at = Column(DateTime, nullable=True)
+ closed_at = Column(DateTime, nullable=True)
+ dmp_id = Column(BigInteger, ForeignKey('dmp_issues.id'), nullable=False)
+ link = Column(Text, nullable=False)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'created_at': self.created_at,
+ 'pr_id': self.pr_id,
+ 'status': self.status,
+ 'title': self.title,
+ 'pr_updated_at': self.pr_updated_at,
+ 'merged_at': self.merged_at,
+ 'closed_at': self.closed_at,
+ 'dmp_id': self.dmp_id,
+ 'link': self.link
+ }
+
+class DmpTickets(Base):
+ __tablename__ = 'dmp_tickets'
+
+ created_at = Column(DateTime, nullable=True)
+ name = Column(Text, nullable=True)
+ product = Column(Text, nullable=True)
+ complexity = Column(Text, nullable=True)
+ project_category = Column(Text, nullable=True)
+ project_sub_category = Column(Text, nullable=True)
+ reqd_skills = Column(Text, nullable=True)
+ issue_id = Column(BigInteger, unique=True, nullable=False)
+ api_endpoint_url = Column(Text, unique=True, nullable=True)
+ url = Column(Text, unique=True, nullable=True)
+ ticket_points = Column(Integer, nullable=True, comment='How many points the ticket is worth')
+ index = Column(Integer, unique=True, autoincrement=True)
+ mentors = Column(Text, nullable=True)
+ uuid = Column(UUID(as_uuid=True), primary_key=True)
+ status = Column(Text, nullable=True)
+ community_label = Column(Boolean, nullable=True, comment='has community label')
+ organization = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'created_at': self.created_at,
+ 'name': self.name,
+ 'product': self.product,
+ 'complexity': self.complexity,
+ 'project_category': self.project_category,
+ 'project_sub_category': self.project_sub_category,
+ 'reqd_skills': self.reqd_skills,
+ 'issue_id': self.issue_id,
+ 'api_endpoint_url': self.api_endpoint_url,
+ 'url': self.url,
+ 'ticket_points': self.ticket_points,
+ 'index': self.index,
+ 'mentors': self.mentors,
+ 'uuid': self.uuid,
+ 'status': self.status,
+ 'community_label': self.community_label,
+ 'organization': self.organization
+ }
+
+class DmpWeekUpdates(Base):
+ __tablename__ = 'dmp_week_updates'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ issue_url = Column(Text, nullable=False)
+ week = Column(BigInteger, nullable=True)
+ total_task = Column(BigInteger, nullable=True)
+ completed_task = Column(BigInteger, nullable=True)
+ progress = Column(Float, nullable=True)
+ task_data = Column(Text, nullable=True)
+ dmp_id = Column(BigInteger, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'issue_url': self.issue_url,
+ 'week': self.week,
+ 'total_task': self.total_task,
+ 'completed_task': self.completed_task,
+ 'progress': self.progress,
+ 'task_data': self.task_data,
+ 'dmp_id': self.dmp_id
+ }
+
+class GithubClassroomData(Base):
+ __tablename__ = 'github_classroom_data'
+ __table_args__ = {'comment': 'Table for saving the details about github classroom assignment data'}
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ created_at = Column(DateTime, nullable=False)
+ assignment_name = Column(Text, nullable=False)
+ assignment_url = Column(Text, nullable=False)
+ assignment_id = Column(Text, nullable=True)
+ starter_code_url = Column(Text, nullable=False)
+ github_username = Column(Text, nullable=True)
+ roster_identifier = Column(Text, nullable=True)
+ student_repository_name = Column(Text, nullable=True)
+ student_repository_url = Column(Text, nullable=True)
+ submission_timestamp = Column(DateTime, nullable=False)
+ points_awarded = Column(Integer, nullable=True)
+ points_available = Column(Integer, nullable=True)
+ c4gt_points = Column(Integer, nullable=True)
+ discord_id = Column(Text, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'created_at': self.created_at,
+ 'assignment_name': self.assignment_name,
+ 'assignment_url': self.assignment_url,
+ 'assignment_id': self.assignment_id,
+ 'starter_code_url': self.starter_code_url,
+ 'github_username': self.github_username,
+ 'roster_identifier': self.roster_identifier,
+ 'student_repository_name': self.student_repository_name,
+ 'student_repository_url': self.student_repository_url,
+ 'submission_timestamp': self.submission_timestamp,
+ 'points_awarded': self.points_awarded,
+ 'points_available': self.points_available,
+ 'c4gt_points': self.c4gt_points,
+ 'discord_id': self.discord_id,
+ 'updated_at': self.updated_at
+ }
+
+class GithubInstallations(Base):
+ __tablename__ = 'github_installations'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ github_organisation = Column(Text, unique=True, nullable=False)
+ installation_id = Column(BigInteger, unique=True, nullable=False)
+ target_type = Column(Text, nullable=True, comment='Type of github entity that installed the app, usually "Organisation"')
+    github_ids = Column(Text, nullable=True, comment="Identifiers on the github database, probably won't be used")
+ permissions_and_events = Column(Text, nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ organisation = Column(Text, ForeignKey('community_orgs.name'), nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'github_organisation': self.github_organisation,
+ 'installation_id': self.installation_id,
+ 'target_type': self.target_type,
+ 'github_ids': self.github_ids,
+ 'permissions_and_events': self.permissions_and_events,
+ 'created_at': self.created_at,
+ 'organisation': self.organisation
+ }
+
+class GithubOrganisationsToOrganisations(Base):
+ __tablename__ = 'github_organisations_to_organisations'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ github_organisation = Column(Text, nullable=False)
+ organisation = Column(Text, nullable=True)
+ created_at = Column(DateTime, nullable=True, comment='Creation date of organization ticket')
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'github_organisation': self.github_organisation,
+ 'organisation': self.organisation,
+ 'created_at': self.created_at
+ }
+
+class IssueContributors(Base):
+ __tablename__ = 'issue_contributors'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ contributor_id = Column(BigInteger, ForeignKey('contributors_registration.id'))
+ issue_id = Column(BigInteger, ForeignKey('issues.id'), primary_key=True)
+ role = Column(BigInteger, ForeignKey('role_master.id'), nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'contributor_id': self.contributor_id,
+ 'issue_id': self.issue_id,
+ 'role_id': self.role,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at
+ }
+
+class IssueMentors(Base):
+ __tablename__ = 'issue_mentors'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ issue_id = Column(BigInteger, ForeignKey('issues.id'))
+ org_mentor_id = Column(Text, nullable=True)
+ angel_mentor_id = Column(BigInteger, ForeignKey('contributors_registration.id'))
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'issue_id': self.issue_id,
+ 'org_mentor_id': self.org_mentor_id,
+ 'angel_mentor_id': self.angel_mentor_id,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at
+ }
+
+class Issues(Base):
+ __tablename__ = 'issues'
+
+ id = Column(BigInteger, primary_key=True)
+ link = Column(Text, nullable=False)
+ labels = Column(Text, nullable=True)
+ project_type = Column(Text, nullable=True)
+ complexity = Column(Text, nullable=True)
+ skills = Column(Text, nullable=True)
+ technology = Column(Text, nullable=True)
+ status = Column(Text, nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+ title = Column(Text, nullable=True)
+ domain = Column(Text, nullable=True)
+ description = Column(Text, nullable=True)
+ org_id = Column(BigInteger, ForeignKey('community_orgs.id'), nullable=True)
+ issue_id = Column(BigInteger, unique=True)
+
+ point_transactions = relationship('PointTransactions', back_populates='issue')
+ user_activities = relationship('UserActivity', back_populates='issue')
+
+
+
+    def __repr__(self):
+        return f"<Issues(id={self.id})>"
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'link': self.link,
+ 'labels': self.labels,
+ 'complexity': self.complexity,
+ 'skills': self.skills,
+ 'technology': self.technology,
+ 'status': self.status,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at,
+ 'title': self.title,
+ 'description': self.description,
+ 'org_id': self.org_id,
+ 'issue_id': self.issue_id,
+ 'project_type':self.project_type,
+ 'domain': self.domain
+ }
+
+class MentorDetails(Base):
+ __tablename__ = 'mentor_details'
+
+ id = Column(BigInteger, primary_key=True)
+ name = Column(String(255), nullable=True)
+ email = Column(String(255), nullable=True)
+ discord_id = Column(String(255), nullable=True)
+ discord_username = Column(String(255), nullable=True)
+ github_id = Column(String(255), nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+
+ point_transactions = relationship('PointTransactions', back_populates='mentor')
+ user_activities = relationship('UserActivity', back_populates='mentor')
+ user_points_mappings = relationship('UserPointsMapping', back_populates='mentor')
+
+
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'name': self.name,
+ 'email': self.email,
+ 'discord_id': self.discord_id,
+ 'discord_username': self.discord_username,
+ 'github_id': self.github_id,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at
+ }
+
+class MentorshipProgramSiteStructure(Base):
+ __tablename__ = 'mentorship_program_site_structure'
+
+ id = Column(BigInteger, primary_key=True)
+ product_id = Column(BigInteger, ForeignKey('product.id'), nullable=True)
+ project_id = Column(BigInteger, nullable=True)
+ contributor_id = Column(BigInteger, nullable=True)
+ website_directory_label = Column(Text, nullable=True)
+ directory_url = Column(Text, nullable=True)
+
+ # project = relationship('MentorshipProgramProjects', back_populates='site_structures')
+ # contributor = relationship('MentorshipProgramSelectedContributors', back_populates='site_structures')
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'product_id': self.product_id,
+ 'project_id': self.project_id,
+ 'contributor_id': self.contributor_id,
+ 'website_directory_label': self.website_directory_label,
+ 'directory_url': self.directory_url
+ }
+
+class MentorshipProgramWebsiteComments(Base):
+ __tablename__ = 'mentorship_program_website_comments'
+
+ comment_id = Column(BigInteger, primary_key=True)
+ url = Column(Text, nullable=True)
+ html_url = Column(Text, nullable=True)
+ commented_by_username = Column(Text, nullable=True)
+ commented_by_id = Column(BigInteger, nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+ body = Column(Text, nullable=True)
+ pr_id = Column(BigInteger, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'comment_id': self.comment_id,
+ 'url': self.url,
+ 'html_url': self.html_url,
+ 'commented_by_username': self.commented_by_username,
+ 'commented_by_id': self.commented_by_id,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at,
+ 'body': self.body,
+ 'pr_id': self.pr_id
+ }
+
+class MentorshipProgramWebsiteCommits(Base):
+ __tablename__ = 'mentorship_program_website_commits'
+
+ node_id = Column(Text, primary_key=True)
+ url = Column(Text, nullable=True)
+ html_url = Column(Text, nullable=True)
+ comment_count = Column(Integer, nullable=True)
+ date = Column(DateTime, nullable=True)
+ author_id = Column(BigInteger, nullable=True)
+ author_username = Column(Text, nullable=True)
+ author_email = Column(Text, nullable=True)
+ committer_id = Column(BigInteger, nullable=True)
+ committer_username = Column(Text, nullable=True)
+ committer_email = Column(Text, nullable=True)
+ additions = Column(Integer, nullable=True)
+ deletions = Column(Integer, nullable=True)
+ files = Column(Text, nullable=True)
+ project_folder_name = Column(Text, nullable=True)
+ pr_id = Column(BigInteger, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'node_id': self.node_id,
+ 'url': self.url,
+ 'html_url': self.html_url,
+ 'comment_count': self.comment_count,
+ 'date': self.date,
+ 'author_id': self.author_id,
+ 'author_username': self.author_username,
+ 'author_email': self.author_email,
+ 'committer_id': self.committer_id,
+ 'committer_username': self.committer_username,
+ 'committer_email': self.committer_email,
+ 'additions': self.additions,
+ 'deletions': self.deletions,
+ 'files': self.files,
+ 'project_folder_name': self.project_folder_name,
+ 'pr_id': self.pr_id
+ }
+
+class MentorshipProgramWebsiteHasUpdated(Base):
+ __tablename__ = 'mentorship_program_website_has_updated'
+
+ id = Column(BigInteger, primary_key=True)
+ project_id = Column(BigInteger, nullable=True)
+ week1_update_date = Column(DateTime, nullable=True)
+ week2_update_date = Column(DateTime, nullable=True)
+ week3_update_date = Column(DateTime, nullable=True)
+ week4_update_date = Column(DateTime, nullable=True)
+ week5_update_date = Column(DateTime, nullable=True)
+ week6_update_date = Column(DateTime, nullable=True)
+ week7_update_date = Column(DateTime, nullable=True)
+ week8_update_date = Column(DateTime, nullable=True)
+ week9_update_date = Column(DateTime, nullable=True)
+ week1_is_default_text = Column(Boolean, nullable=True)
+ week2_is_default_text = Column(Boolean, nullable=True)
+ week3_is_default_text = Column(Boolean, nullable=True)
+ week4_is_default_text = Column(Boolean, nullable=True)
+ week5_is_default_text = Column(Boolean, nullable=True)
+ week6_is_default_text = Column(Boolean, nullable=True)
+ week7_is_default_text = Column(Boolean, nullable=True)
+ week8_is_default_text = Column(Boolean, nullable=True)
+ week9_is_default_text = Column(Boolean, nullable=True)
+ product = Column(Text, nullable=True)
+ project_folder = Column(Text, unique=True, nullable=False)
+ all_links = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'project_id': self.project_id,
+ 'week1_update_date': self.week1_update_date,
+ 'week2_update_date': self.week2_update_date,
+ 'week3_update_date': self.week3_update_date,
+ 'week4_update_date': self.week4_update_date,
+ 'week5_update_date': self.week5_update_date,
+ 'week6_update_date': self.week6_update_date,
+ 'week7_update_date': self.week7_update_date,
+ 'week8_update_date': self.week8_update_date,
+ 'week9_update_date': self.week9_update_date,
+ 'week1_is_default_text': self.week1_is_default_text,
+ 'week2_is_default_text': self.week2_is_default_text,
+ 'week3_is_default_text': self.week3_is_default_text,
+ 'week4_is_default_text': self.week4_is_default_text,
+ 'week5_is_default_text': self.week5_is_default_text,
+ 'week6_is_default_text': self.week6_is_default_text,
+ 'week7_is_default_text': self.week7_is_default_text,
+ 'week8_is_default_text': self.week8_is_default_text,
+ 'week9_is_default_text': self.week9_is_default_text,
+ 'product': self.product,
+ 'project_folder': self.project_folder,
+ 'all_links': self.all_links
+ }
+
+
+
+
+class MentorshipProgramWebsitePullRequest(Base):
+ __tablename__ = 'mentorship_program_website_pull_request'
+
+ pr_url = Column(Text, nullable=True)
+ pr_id = Column(BigInteger, primary_key=True)
+ pr_node_id = Column(Text, unique=True, nullable=True)
+ html_url = Column(Text, nullable=True)
+ status = Column(Text, nullable=True)
+ title = Column(Text, nullable=True)
+ raised_by_username = Column(Text, nullable=True)
+ raised_by_id = Column(Integer, nullable=True)
+ body = Column(Text, nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+ closed_at = Column(DateTime, nullable=True)
+ merged_at = Column(DateTime, nullable=True)
+ assignees = Column(Text, nullable=True)
+ requested_reviewers = Column(Text, nullable=True)
+ labels = Column(Text, nullable=True)
+ review_comments_url = Column(Text, nullable=True)
+ comments_url = Column(Text, nullable=True)
+ repository_id = Column(Integer, nullable=True)
+ repository_owner_name = Column(Text, nullable=True)
+ repository_owner_id = Column(Integer, nullable=True)
+ repository_url = Column(Text, nullable=True)
+ merged = Column(Boolean, nullable=True)
+ number_of_commits = Column(Integer, nullable=True)
+ number_of_comments = Column(Integer, nullable=True)
+ lines_of_code_added = Column(Integer, nullable=True)
+ lines_of_code_removed = Column(Integer, nullable=True)
+ number_of_files_changed = Column(Integer, nullable=True)
+ merged_by_id = Column(BigInteger, nullable=True)
+ merged_by_username = Column(Text, nullable=True)
+ linked_ticket = Column(Text, nullable=True)
+ project_name = Column(Text, nullable=True)
+ project_folder_label = Column(Text, nullable=True)
+ week_number = Column(SmallInteger, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'pr_url': self.pr_url,
+ 'pr_id': self.pr_id,
+ 'pr_node_id': self.pr_node_id,
+ 'html_url': self.html_url,
+ 'status': self.status,
+ 'title': self.title,
+ 'raised_by_username': self.raised_by_username,
+ 'raised_by_id': self.raised_by_id,
+ 'body': self.body,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at,
+ 'closed_at': self.closed_at,
+ 'merged_at': self.merged_at,
+ 'assignees': self.assignees,
+ 'requested_reviewers': self.requested_reviewers,
+ 'labels': self.labels,
+ 'review_comments_url': self.review_comments_url,
+ 'comments_url': self.comments_url,
+ 'repository_id': self.repository_id,
+ 'repository_owner_name': self.repository_owner_name,
+ 'repository_owner_id': self.repository_owner_id,
+ 'repository_url': self.repository_url,
+ 'merged': self.merged,
+ 'number_of_commits': self.number_of_commits,
+ 'number_of_comments': self.number_of_comments,
+ 'lines_of_code_added': self.lines_of_code_added,
+ 'lines_of_code_removed': self.lines_of_code_removed,
+ 'number_of_files_changed': self.number_of_files_changed,
+ 'merged_by_id': self.merged_by_id,
+ 'merged_by_username': self.merged_by_username,
+ 'linked_ticket': self.linked_ticket,
+ 'project_name': self.project_name,
+ 'project_folder_label': self.project_folder_label,
+ 'week_number': self.week_number
+ }
+
+class MentorshipWebsiteContributorProject(Base):
+ __tablename__ = 'mentorship_website_contributor_project'
+
+ project_folder = Column(Text, primary_key=True)
+ contributor = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'project_folder': self.project_folder,
+ 'contributor': self.contributor
+ }
+
+class PointSystem(Base):
+ __tablename__ = 'point_system'
+
+ id = Column(BigInteger, primary_key=True)
+ complexity = Column(Text, nullable=False)
+ points = Column(SmallInteger, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'complexity': self.complexity,
+ 'points': self.points
+ }
+
+class PointTransactions(Base):
+ __tablename__ = 'point_transactions'
+
+ id = Column(BigInteger, primary_key=True)
+ user_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True)
+ issue_id = Column(BigInteger, ForeignKey('issues.id'), nullable=False)
+ point = Column(Integer, nullable=True)
+ type = Column(Text, nullable=True)
+ created_at = Column(DateTime, default=func.now(), nullable=False) # Set to current time when created
+ updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False) # Updated to current time when record is modified
+ angel_mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True)
+
+
+ contributor = relationship('ContributorsRegistration', back_populates='point_transactions')
+ issue = relationship('Issues', back_populates='point_transactions')
+ mentor = relationship('MentorDetails', back_populates='point_transactions')
+
+ def __repr__(self):
+ return f""
+
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'user_id': self.user_id,
+ 'issue_id': self.issue_id,
+ 'point': self.point,
+ 'type': self.type,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at,
+ 'angel_mentor_id': self.angel_mentor_id
+ }
+
+class PointsMapping(Base):
+ __tablename__ = 'points_mapping'
+
+ id = Column(BigInteger, primary_key=True)
+ role = Column(String(50), nullable=False)
+ complexity = Column(String(50), nullable=False)
+ points = Column(Integer, nullable=False)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'role': self.role,
+ 'complexity': self.complexity,
+ 'points': self.points,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at
+ }
+
+
+
+
+class PrHistory(Base):
+ __tablename__ = 'pr_history'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ created_at = Column(DateTime, nullable=True)
+ api_url = Column(Text, nullable=True)
+ html_url = Column(Text, unique=True, nullable=True)
+ raised_by = Column(BigInteger, nullable=True)
+ raised_at = Column(DateTime, nullable=False)
+ raised_by_username = Column(Text, nullable=False)
+ status = Column(Text, nullable=True)
+ is_merged = Column(Boolean, nullable=True)
+ merged_by = Column(BigInteger, nullable=True)
+ merged_at = Column(Text, nullable=True)
+ merged_by_username = Column(Text, nullable=True)
+ pr_id = Column(BigInteger, nullable=False)
+ ticket_url = Column(Text, nullable=False)
+ ticket_complexity = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'created_at': self.created_at,
+ 'api_url': self.api_url,
+ 'html_url': self.html_url,
+ 'raised_by': self.raised_by,
+ 'raised_at': self.raised_at,
+ 'raised_by_username': self.raised_by_username,
+ 'status': self.status,
+ 'is_merged': self.is_merged,
+ 'merged_by': self.merged_by,
+ 'merged_at': self.merged_at,
+ 'merged_by_username': self.merged_by_username,
+ 'pr_id': self.pr_id,
+ 'ticket_url': self.ticket_url,
+ 'ticket_complexity': self.ticket_complexity
+ }
+
+class PrStaging(Base):
+ __tablename__ = 'pr_staging'
+
+ id = Column(String(36), primary_key=True) # UUID field
+ created_at = Column(DateTime, nullable=True)
+ api_url = Column(Text, nullable=True)
+ html_url = Column(Text, unique=True, nullable=True)
+ raised_by = Column(BigInteger, nullable=True)
+ raised_at = Column(DateTime, nullable=False)
+ raised_by_username = Column(Text, nullable=False)
+ status = Column(Text, nullable=True)
+ is_merged = Column(Boolean, nullable=True)
+ merged_by = Column(BigInteger, nullable=True)
+ merged_at = Column(Text, nullable=True)
+ merged_by_username = Column(Text, nullable=True)
+ pr_id = Column(BigInteger, nullable=False)
+ points = Column(SmallInteger, nullable=False)
+ ticket_url = Column(Text, nullable=False)
+ ticket_complexity = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'created_at': self.created_at,
+ 'api_url': self.api_url,
+ 'html_url': self.html_url,
+ 'raised_by': self.raised_by,
+ 'raised_at': self.raised_at,
+ 'raised_by_username': self.raised_by_username,
+ 'status': self.status,
+ 'is_merged': self.is_merged,
+ 'merged_by': self.merged_by,
+ 'merged_at': self.merged_at,
+ 'merged_by_username': self.merged_by_username,
+ 'pr_id': self.pr_id,
+ 'points': self.points,
+ 'ticket_url': self.ticket_url,
+ 'ticket_complexity': self.ticket_complexity
+ }
+
+class Product(Base):
+ __tablename__ = 'product'
+
+ id = Column(BigInteger, primary_key=True) # Auto field
+ name = Column(Text, unique=True, nullable=False)
+ description = Column(Text, nullable=True)
+ wiki_url = Column(Text, nullable=True)
+ channel_id = Column(BigInteger, ForeignKey('discord_channels.channel_id'), nullable=True) # Assumes 'DiscordChannels' model
+
+ channel = relationship('DiscordChannels', back_populates='products')
+
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'name': self.name,
+ 'description': self.description,
+ 'wiki_url': self.wiki_url,
+ 'channel_id': self.channel_id
+ }
+
+class RoleMaster(Base):
+ __tablename__ = 'role_master'
+
+ id = Column(BigInteger, primary_key=True) # Auto field
+ created_at = Column(DateTime, nullable=False)
+ updated_at = Column(DateTime, nullable=True)
+ role = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at,
+ 'role': self.role
+ }
+
+class TicketComments(Base):
+ __tablename__ = 'ticket_comments'
+
+ id = Column(BigInteger, primary_key=True)
+ url = Column(Text, nullable=True)
+ html_url = Column(Text, nullable=True)
+ issue_url = Column(Text, nullable=True)
+ node_id = Column(Text, nullable=True)
+ comment_id = Column(BigInteger, nullable=True)
+ issue_id = Column(BigInteger, nullable=True)
+ commented_by = Column(Text, nullable=True)
+ commented_by_id = Column(BigInteger, nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+ content = Column(Text, nullable=True)
+ reactions_url = Column(Text, nullable=True)
+ ticket_url = Column(Text, nullable=False)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'url': self.url,
+ 'html_url': self.html_url,
+ 'issue_url': self.issue_url,
+ 'node_id': self.node_id,
+ 'commented_by': self.commented_by,
+ 'commented_by_id': self.commented_by_id,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at,
+ 'content': self.content,
+ 'reactions_url': self.reactions_url,
+ 'ticket_url': self.ticket_url
+ }
+
+class UnlistedTickets(Base):
+ __tablename__ = 'unlisted_tickets'
+
+ created_at = Column(DateTime, nullable=True)
+ name = Column(Text, nullable=True)
+ product = Column(Text, nullable=True)
+ complexity = Column(Text, nullable=True)
+ project_category = Column(Text, nullable=True)
+ project_sub_category = Column(Text, nullable=True)
+ reqd_skills = Column(Text, nullable=True)
+ issue_id = Column(BigInteger, unique=True, nullable=False)
+ api_endpoint_url = Column(Text, unique=True, nullable=True)
+ url = Column(Text, unique=True, nullable=True)
+ ticket_points = Column(SmallInteger, nullable=True)
+ index = Column(SmallInteger, unique=True, nullable=False)
+ mentors = Column(Text, nullable=True)
+ uuid = Column(String(36), primary_key=True) # UUID field
+ status = Column(Text, nullable=True)
+ organization = Column(Text, nullable=True)
+
+ __table_args__ = (UniqueConstraint('uuid', 'issue_id'),)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'created_at': self.created_at,
+ 'name': self.name,
+ 'product': self.product,
+ 'complexity': self.complexity,
+ 'project_category': self.project_category,
+ 'project_sub_category': self.project_sub_category,
+ 'reqd_skills': self.reqd_skills,
+ 'issue_id': self.issue_id,
+ 'api_endpoint_url': self.api_endpoint_url,
+ 'url': self.url,
+ 'ticket_points': self.ticket_points,
+ 'index': self.index,
+ 'mentors': self.mentors,
+ 'uuid': self.uuid,
+ 'status': self.status,
+ 'organization': self.organization
+ }
+
+class UnstructuredDiscordData(Base):
+ __tablename__ = 'unstructured_discord_data'
+
+ text = Column(Text, nullable=True)
+ author = Column(BigInteger, nullable=True)
+ channel = Column(BigInteger, nullable=True)
+ channel_name = Column(Text, nullable=True)
+ uuid = Column(String(36), primary_key=True) # UUID field
+ author_name = Column(Text, nullable=True)
+ author_roles = Column(Text, nullable=True)
+ sent_at = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'text': self.text,
+ 'author': self.author,
+ 'channel': self.channel,
+ 'channel_name': self.channel_name,
+ 'uuid': self.uuid,
+ 'author_name': self.author_name,
+ 'author_roles': self.author_roles,
+ 'sent_at': self.sent_at
+ }
+
+class UserActivity(Base):
+ __tablename__ = 'user_activity'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ contributor_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=False) # Assumes 'ContributorsRegistration' model
+ issue_id = Column(BigInteger, ForeignKey('issues.id'), nullable=False) # Assumes 'Issues' model
+ activity = Column(Text, nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+ mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) # Assumes 'MentorDetails' model
+
+ contributor = relationship('ContributorsRegistration', back_populates='user_activities')
+ issue = relationship('Issues', back_populates='user_activities')
+ mentor = relationship('MentorDetails', back_populates='user_activities')
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'contributor_id': self.contributor_id,
+ 'issue_id': self.issue_id,
+ 'activity': self.activity,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at,
+ 'mentor_id': self.mentor_id
+ }
+
+class UserBadges(Base):
+ __tablename__ = 'user_badges'
+ id = Column(UUID(as_uuid=True), primary_key=True)
+ user_id = Column(BigInteger, ForeignKey('users.id'), nullable=False) # Assumes 'Users' model
+ badge_id = Column(BigInteger, ForeignKey('badges.id'), nullable=False) # Assumes 'Badges' model
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+
+ user = relationship('Users', back_populates='user_badges')
+ badge = relationship('Badges', back_populates='user_badges')
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'user_id': self.user_id,
+ 'badge_id': self.badge_id,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at
+ }
+
+class UserCertificates(Base):
+ __tablename__ = 'user_certificates'
+ id = Column(UUID(as_uuid=True), primary_key=True)
+ user_id = Column(BigInteger, ForeignKey('users.id'), nullable=False) # Assumes 'Users' model
+ certificate_link = Column(Text, nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+
+ user = relationship('Users', back_populates='user_certificates')
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'user_id': self.user_id,
+ 'certificate_link': self.certificate_link,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at
+ }
+
+
+
+
+class UserPointsMapping(Base):
+ __tablename__ = 'user_points_mapping'
+ id = Column(UUID(as_uuid=True), primary_key=True)
+ contributor = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True) # Assumes 'ContributorsRegistration' model
+ points = Column(Integer, nullable=False)
+ level = Column(String(50), nullable=True)
+ created_at = Column(DateTime, default=func.now(), nullable=False) # Set to current time when created
+ updated_at = Column(DateTime, default=func.now(), onupdate=func.now(), nullable=False)
+ mentor_id = Column(BigInteger, ForeignKey('mentor_details.id'), nullable=True) # Assumes 'MentorDetails' model
+
+ contributors = relationship('ContributorsRegistration', back_populates='user_points_mappings')
+ mentor = relationship('MentorDetails', back_populates='user_points_mappings')
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'contributor_id': self.contributor,
+ 'points': self.points,
+ 'level': self.level,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at,
+ 'mentor_id': self.mentor_id
+ }
+
+class Users(Base):
+ __tablename__ = 'users'
+
+ id = Column(BigInteger, primary_key=True) # Assumes id is the primary key
+ name = Column(Text, nullable=True)
+ discord = Column(Text, unique=True, nullable=True)
+ github = Column(Text, nullable=True)
+ points = Column(Integer, nullable=True)
+ level = Column(Text, nullable=True)
+ created_at = Column(DateTime, nullable=True)
+ updated_at = Column(DateTime, nullable=True)
+
+ user_badges = relationship('UserBadges', back_populates='user')
+ user_certificates = relationship('UserCertificates', back_populates='user')
+
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'name': self.name,
+ 'discord': self.discord,
+ 'github': self.github,
+ 'points': self.points,
+ 'level': self.level,
+ 'created_at': self.created_at,
+ 'updated_at': self.updated_at
+ }
+
+class VcLogs(Base):
+ __tablename__ = 'vc_logs'
+
+ id = Column(BigInteger, primary_key=True) # Auto field
+ created_at = Column(DateTime, nullable=False)
+ discord_id = Column(BigInteger, nullable=True)
+ discord_name = Column(Text, nullable=True)
+ option = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'created_at': self.created_at,
+ 'discord_id': self.discord_id,
+ 'discord_name': self.discord_name,
+ 'option': self.option
+ }
+
+class GitHubProfileData(Base):
+ __tablename__ = 'github_profile_data'
+
+ github_username = Column(String, primary_key=True)
+ discord_id = Column(BigInteger, nullable=False)
+ classroom_points = Column(Integer, nullable=False, default=0)
+ prs_raised = Column(Integer, nullable=False, default=0)
+ prs_reviewed = Column(Integer, nullable=False, default=0)
+ prs_merged = Column(Integer, nullable=False, default=0)
+ dpg_points = Column(Integer, nullable=False, default=0)
+ milestone = Column(Integer, nullable=False, default=0)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'github_username': self.github_username,
+ 'discord_id': self.discord_id,
+ 'classroom_points': self.classroom_points,
+ 'prs_raised': self.prs_raised,
+ 'prs_reviewed': self.prs_reviewed,
+ 'prs_merged': self.prs_merged,
+ 'dpg_points': self.dpg_points,
+ 'milestone': self.milestone,
+ }
+
+class CommunityOrgs(Base):
+ __tablename__ = 'community_orgs'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ name = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'name': self.name
+ }
+
+
+
+class ContributorPoints(Base):
+ __tablename__ = 'contributor_points'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ contributors_id = Column(BigInteger, ForeignKey('contributors_registration.id'), nullable=True)
+ total_points = Column(Integer, nullable=False, default=0)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'contributors_id': self.contributors_id,
+ 'total_points': self.total_points
+ }
+
+class MentorNotAdded(Base):
+ __tablename__ = 'mentor_not_added'
+
+ id = Column(BigInteger, primary_key=True, autoincrement=True)
+ mentor_github_id = Column(BigInteger, nullable=True)
+ issue_id = Column(BigInteger, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'id': self.id,
+ 'mentor_github_id': self.mentor_github_id,
+ 'issue_id': self.issue_id
+ }
+
+
+
+class Leaderboard(Base):
+ __tablename__ = 'leaderboard'
+
+ discord_id = Column(BigInteger, primary_key=True, autoincrement=False)
+ github_id = Column(BigInteger, nullable=False)
+ github_url = Column(Text, nullable=False)
+ apprentice_badge = Column(Boolean, nullable=True)
+ converser_badge = Column(Boolean, nullable=False, default=False)
+ rockstar_badge = Column(Boolean, nullable=False, default=False)
+ enthusiast_badge = Column(Boolean, nullable=False, default=False)
+ rising_star_badge = Column(Boolean, nullable=False, default=False)
+ github_x_discord_badge = Column(Boolean, nullable=False, default=False)
+ points = Column(Integer, nullable=False, default=0)
+ bronze_badge = Column(Boolean, nullable=False, default=False)
+ silver_badge = Column(Boolean, nullable=False, default=False)
+ gold_badge = Column(Boolean, nullable=False, default=False)
+ ruby_badge = Column(Boolean, nullable=False, default=False)
+ diamond_badge = Column(Boolean, nullable=False, default=False)
+ certificate_link = Column(Text, nullable=True)
+
+ def __repr__(self):
+ return f""
+
+ def to_dict(self):
+ return {
+ 'discord_id': self.discord_id,
+ 'github_id': self.github_id,
+ 'github_url': self.github_url,
+ 'apprentice_badge': self.apprentice_badge,
+ 'converser_badge': self.converser_badge,
+ 'rockstar_badge': self.rockstar_badge,
+ 'enthusiast_badge': self.enthusiast_badge,
+ 'rising_star_badge': self.rising_star_badge,
+ 'github_x_discord_badge': self.github_x_discord_badge,
+ 'points': self.points,
+ 'bronze_badge': self.bronze_badge,
+ 'silver_badge': self.silver_badge,
+ 'gold_badge': self.gold_badge,
+ 'ruby_badge': self.ruby_badge,
+ 'diamond_badge': self.diamond_badge,
+ 'certificate_link': self.certificate_link
+ }
\ No newline at end of file
diff --git a/shared_migrations/db/server.py b/shared_migrations/db/server.py
new file mode 100644
index 0000000..c366f5a
--- /dev/null
+++ b/shared_migrations/db/server.py
@@ -0,0 +1,938 @@
+import dotenv
+import os
+
+from datetime import datetime
+
+from sqlalchemy import and_, asc, cast, delete, desc, insert, join, select, update, String
+from sqlalchemy.dialects.postgresql import ARRAY
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
+from sqlalchemy.ext.declarative import DeclarativeMeta
+from sqlalchemy.orm import sessionmaker, aliased
+from sqlalchemy.pool import NullPool
+from sqlalchemy.sql import exists
+
+from .models import Base, ContributorsRegistration, GithubClassroomData, IssueContributors
+from .models import Issues, CommunityOrgs, PointSystem, PrHistory
+
+# dotenv.load_dotenv(".env")
+
+
+# def get_postgres_uri():
+# DB_HOST = os.getenv('POSTGRES_DB_HOST')
+# DB_NAME = os.getenv('POSTGRES_DB_NAME')
+# DB_USER = os.getenv('POSTGRES_DB_USER')
+# DB_PASS = os.getenv('POSTGRES_DB_PASS')
+
+# # DB_URL = os.getenv('DATABASE_URL')
+# # print('db')
+# return f'postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}/{DB_NAME}'
+
+
+class ServerQueries:
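+    """Async SQLAlchemy query helpers shared by the server-side services.
+
+    Methods assume `self.session` is bound to an async session factory;
+    the commented-out __init__ below shows the intended wiring.
+    """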
+
+ # def __init__(self):
+ # DATABASE_URL = get_postgres_uri()
+ # # Initialize Async SQLAlchemy
+ # engine = create_async_engine(DATABASE_URL, echo=False,poolclass=NullPool)
+ # async_session = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession)
+ # self.session = async_session
+
+ # def get_instance():
+ # return PostgresORM()
+
+    def convert_dict(self, data):
+        # Normalise a single ORM object or a list of them into a list of dicts.
+        try:
+            if isinstance(data, list):
+                return [val.to_dict() for val in data]
+            return [data.to_dict()]
+        except Exception as e:
+            print(e)
+            raise
+
+
+ def get_class_by_tablename(self,tablename):
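+        # Resolve an ORM class from its __tablename__ by scanning Base's class registry.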
+ try:
+ for cls in Base.registry._class_registry.values():
+ if isinstance(cls, DeclarativeMeta):
+ if hasattr(cls, '__tablename__') and cls.__tablename__ == tablename:
+ return cls
+ return None
+ except Exception as e:
+ print(f"ERROR get_class_by_tablename - {e}")
+ return None
+
+ async def readAll(self,table_class):
+ try:
+ table = self.get_class_by_tablename(table_class)
+ # Query all records from the specified table class
+ async with self.session() as session:
+ stmt = select(table)
+ result = await session.execute(stmt)
+
+ data = result.scalars().all()
+ result = self.convert_dict(data)
+ return result
+ except Exception as e:
+ print(f"An error occurred -read_all_from_table : {e}")
+ return None
+
+
+ async def deleteComment(self,issue_id,table_name):
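+        # Delete every row in the given table whose issue_id matches.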
+ try:
+ table = self.get_class_by_tablename(table_name)
+ async with self.session() as session:
+ stmt = delete(table).where(table.issue_id == issue_id)
+ await session.execute(stmt)
+ await session.commit()
+
+ return True
+
+ except Exception as e:
+ print(f"An error occurred - deleteComment: {e}")
+ return False
+
+ async def read(self, table, filters=None, select_columns=None, order=None, limit=None, offset=None):
+ """
+ Reads data from a table in the database using SQLAlchemy ORM.
+ """
+ try:
+ table_class = self.get_class_by_tablename(table)
+
+ # Select specific columns or all columns if None
+ if select_columns:
+                # 2.0-style select() takes columns positionally rather than as a list;
+                # note that column-only reads return plain values, not ORM objects.
+                stmt = select(*[getattr(table_class, col) for col in select_columns])
+ else:
+ stmt = select(table_class)
+
+ # Apply filters
+ if filters:
+ for column, condition in filters.items():
+ if isinstance(condition, tuple) and len(condition) == 2:
+ operation, value = condition
+ col_attr = getattr(table_class, column)
+ if operation == 'gt':
+ stmt = stmt.where(col_attr > value)
+ elif operation == 'lt':
+ stmt = stmt.where(col_attr < value)
+ elif operation == 'gte':
+ stmt = stmt.where(col_attr >= value)
+ elif operation == 'lte':
+ stmt = stmt.where(col_attr <= value)
+ else:
+ stmt = stmt.where(getattr(table_class, column) == condition)
+
+ # Apply ordering
+ if order:
+ for column, direction in order.items():
+ if direction == 'asc':
+ stmt = stmt.order_by(asc(getattr(table_class, column)))
+ elif direction == 'desc':
+ stmt = stmt.order_by(desc(getattr(table_class, column)))
+
+ # Apply limit
+ if limit:
+ stmt = stmt.limit(limit)
+
+ # Apply offset
+ if offset:
+ stmt = stmt.offset(offset)
+
+ async with self.session() as session:
+ result = await session.execute(stmt)
+ data = result.scalars().all()
+
+ # Convert result to dictionary
+ return [row.to_dict() for row in data]
+
+ except Exception as e:
+ print(f"An error occurred - read: {e}")
+ return None
+
+
+ async def add_discord_metrics(self, discord_metrics):
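+        # Upsert one row per product: update message counts when the product exists, insert otherwise.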
+ try:
+ async with self.session() as session:
+ DiscordMetrics = self.get_class_by_tablename("discord_metrics")
+
+ for metric in discord_metrics:
+ stmt = select(DiscordMetrics).where(DiscordMetrics.product_name == metric["product_name"])
+ result = await session.execute(stmt)
+ existing_record = result.scalars().first()
+
+ if existing_record:
+ update_stmt = (
+ update(DiscordMetrics)
+ .where(DiscordMetrics.product_name == metric["product_name"])
+ .values(
+ mentor_messages=metric["mentor_messages"],
+ contributor_messages=metric["contributor_messages"]
+ )
+ .returning(DiscordMetrics)
+ )
+ updated_data = await session.execute(update_stmt)
+ data = updated_data.scalars().first()
+ else:
+ new_record = DiscordMetrics(**metric)
+ session.add(new_record)
+ await session.commit()
+ await session.refresh(new_record)
+ data = new_record
+
+ await session.commit()
+ return data
+
+ except IntegrityError as e:
+ print(f"An error occurred: {e}")
+ await session.rollback()
+ return None
+
+ async def add_github_metrics(self, github_metrics):
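+        # Same upsert pattern as add_discord_metrics, keyed on product_name.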
+ try:
+ async with self.session() as session:
+ for metric in github_metrics:
+ GithubMetrics = self.get_class_by_tablename("github_metrics")
+
+ # Check if the metric already exists in the database
+ stmt = select(GithubMetrics).where(GithubMetrics.product_name == metric["product_name"])
+ result = await session.execute(stmt)
+ existing_record = result.scalars().first()
+
+ if existing_record:
+ update_data = {key: value for key, value in metric.items() if key != "product_name"}
+
+ update_stmt = (
+ update(GithubMetrics)
+ .where(GithubMetrics.product_name == metric["product_name"])
+ .values(update_data)
+ .returning(GithubMetrics)
+ )
+ updated_data = await session.execute(update_stmt)
+ data = updated_data.scalars().first()
+ else:
+ # Insert the new metric if it doesn't exist
+ new_record = GithubMetrics(**metric)
+ session.add(new_record)
+ await session.commit()
+ await session.refresh(new_record)
+ data = new_record
+
+ await session.commit()
+ return data
+
+ except IntegrityError as e:
+ print(f"An error occurred: {e}")
+ await session.rollback()
+ return None
+
+ async def check_exists(self,discord_id, assignment_id):
+ try:
+ # Construct the query for check exists
+ async with self.session() as session:
+ stmt = (
+ select(exists()
+ .where((GithubClassroomData.discord_id.is_(None)) | (GithubClassroomData.discord_id == discord_id))
+ .where(GithubClassroomData.assignment_id == assignment_id)
+ )
+ )
+ result = await session.execute(stmt)
+ exists_result = result.scalar()
+
+ return exists_result
+
+ except Exception as e:
+ print(f"An error occurred: {e}")
+ return None
+
+ async def save_classroom_records(self, data):
+ try:
+ async with self.session() as session:
+ for record in data:
+ try:
+ new_record = GithubClassroomData(
+ **record)
+ session.add(new_record)
+
+ await session.commit()
+ print("Record inserting successfully!")
+ except Exception as e:
+ await session.rollback()
+ print("Error updating record:", e)
+
+ return True
+ except Exception as e:
+ print(f"An error occurred save_classroom_records: {e}")
+ return False
+
+ async def update_classroom_records(self, data):
+ async with self.session() as session:
+ for record in data:
+ try:
+ stmt = (
+ update(GithubClassroomData).
+ where(
+ GithubClassroomData.assignment_id == record.get('assignment_id'),
+ GithubClassroomData.discord_id == cast(str(record.get('discord_id')),String)
+ ).
+ values(
+ assignment_name=record.get('assignment', {}).get('title'),
+ assignment_url=record.get('assignment', {}).get('classroom', {}).get('url'),
+ c4gt_points=record.get('c4gt_points'),
+ github_username=record.get('students', [{}])[0].get('login'),
+ points_available=record.get('points_available'),
+ points_awarded=record.get('points_awarded',0),
+ roster_identifier=record.get('roster_identifier',""),
+ starter_code_url=record.get('starter_code_url', record.get('repository', {}).get('html_url')),
+ student_repository_name=record.get('repository', {}).get('full_name'),
+ student_repository_url=record.get('repository', {}).get('html_url'),
+                            submission_timestamp=record.get('submission_timestamp', datetime.now()),
+ updated_at=record.get('updated_at')
+ )
+ )
+ result = await session.execute(stmt)
+ await session.commit()
+ print("Record updated successfully!")
+                except Exception as e:
+                    await session.rollback()
+                    print("Error updating record:", e)
+                    return False
+            # Report success only after every record has been processed.
+            return True
+
+ async def getdiscord_from_cr(self,github_url):
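+        # Look up the registered contributor's discord_id from their GitHub profile URL.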
+ try:
+ Table = self.get_class_by_tablename("contributors_registration")
+ async with self.session() as session:
+ stmt = (select(Table.discord_id).where(Table.github_url == github_url))
+ result = await session.execute(stmt)
+ exists_result = result.scalar()
+
+ return exists_result
+ except Exception as e:
+ print("Error - getdiscord_from_cr:", e)
+ return None
+
+
+ async def add_data(self, data: dict, table_name: str):
+ try:
+ table_class = self.get_class_by_tablename(table_name)
+ if not table_class:
+ raise ValueError(f"Table class for {table_name} not found")
+
+ async with self.session() as session:
+ new_record = table_class(**data)
+ session.add(new_record)
+ await session.commit()
+ await session.refresh(new_record)
+
+ return new_record
+ except Exception as e:
+ print("Error - add_data:", e)
+ return None
+
+ async def insert_org(self, name):
+ try:
+ async with self.session() as session:
+ table = self.get_class_by_tablename("community_orgs")
+ if not table:
+ raise ValueError(f"No ORM class found for table community_orgs")
+
+ stmt = insert(table).values(
+ name=name
+ ).returning(table)
+
+ result = await session.execute(stmt)
+
+ await session.commit()
+ inserted_record = result.fetchone()
+ print("inserted_record ", {"id": inserted_record[0], "name": inserted_record[1]})
+ return {"id": inserted_record[0], "name": inserted_record[1]}
+
+ except Exception as e:
+ print(f"Error in record_created_ticket method: {e}")
+ return None
+
+
+
+ async def check_record_exists(self, table_name, filter_column, filter_value):
+ try:
+ table_class = self.get_class_by_tablename(table_name)
+ if not table_class:
+ raise ValueError(f"No ORM class found for table '{table_name}'")
+
+ async with self.session() as session:
+ stmt = (
+ select(table_class)
+ .where(getattr(table_class, filter_column) == filter_value)
+ )
+ result = await session.execute(stmt)
+                # Avoid shadowing sqlalchemy.sql.exists with a local variable.
+                return result.scalars().first() is not None
+ except Exception as e:
+ print(f"An error occurred - check_record_exists: {e}")
+ return False
+
+
+ async def delete(self,table_name, filter_column, filter_value):
+ try:
+ table = self.get_class_by_tablename(table_name)
+ async with self.session() as session:
+ stmt = delete(table).where(getattr(table, filter_column) == filter_value)
+ await session.execute(stmt)
+ await session.commit()
+ return True
+
+ except Exception as e:
+ print(f"An error occurred - delete: {e}")
+ return False
+
+
+ async def get_data(self,col_name,table_name,value,condition=None):
+ try:
+ Table = self.get_class_by_tablename(table_name)
+ async with self.session() as session:
+ stmt = (select(Table).where(getattr(Table, col_name) == value))
+ # Execute the query
+ result = await session.execute(stmt)
+ exists_result = result.scalar()
+ if exists_result:
+ return self.convert_dict(exists_result)
+ else:
+ return None
+
+ except Exception as e:
+ print(f"An error occurred - get_data: {e}")
+ return None
+
+ async def checkIsTicket(self, issue_id):
+ try:
+ tables_to_check = ['issues']
+
+ async with self.session() as session:
+ data = []
+ for table_name in tables_to_check:
+ table_class = self.get_class_by_tablename(table_name)
+ if not table_class:
+ continue
+ stmt = select(table_class).where(getattr(table_class, 'issue_id') == issue_id)
+ result = await session.execute(stmt)
+ records = result.scalars().all()
+
+ if records:
+ data.extend(records)
+                # True if the issue_id was found in any of the checked tables
+                return len(data) > 0
+ except Exception as e:
+ print(f"An error occurred - check_is_ticket: {e}")
+ return False
+
+
+ async def record_created_ticket(self, data,table_name):
+ try:
+ async with self.session() as session:
+ # Dynamically get the ORM class for the table
+ table = self.get_class_by_tablename(table_name)
+
+ # Build and execute the query to check if the issue_id already exists
+ # stmt = select(table).where(table.issue_id == data['issue_id'])
+
+ stmt = insert(table).values(
+ link=data['link'],
+ labels=cast(data['labels'], ARRAY(String)), # Cast to ARRAY type
+ complexity=data['complexity'],
+ technology=data['technology'],
+ status=data['status'],
+ created_at=data['created_at'],
+ updated_at=data['updated_at'],
+ title=data['title'],
+ domain=data['domain'],
+ description=f"{data['description']}",
+ org_id=data['org_id'],
+ issue_id=data['issue_id'],
+ project_type=data['project_type']
+ ).returning(table)
+
+ result = await session.execute(stmt)
+
+ await session.commit()
+
+ # inserted_record = await result.fetchone()
+ # print("inserted result ", inserted_record)
+ return result
+
+ except Exception as e:
+ print(f"Error in record_created_ticket method: {e}")
+ return None
+
+
+ async def record_updated_ticket(self, data, table_name):
+ try:
+ async with self.session() as session:
+ # Dynamically get the ORM class for the table
+ table = self.get_class_by_tablename(table_name)
+
+ # Build the update query
+ stmt = (
+ update(table)
+ .where(table.issue_id == data['issue_id']) # Match the existing issue by issue_id
+ .values(
+ link=data['link'],
+ labels=cast(data['labels'], ARRAY(String)), # Cast to ARRAY type
+ complexity=data['complexity'],
+ technology=data['technology'],
+ status=data['status'],
+ created_at=data['created_at'],
+ updated_at=data['updated_at'],
+ title=data['title'],
+ description=f"{data['description']}",
+ org_id=data['org_id']
+ )
+ .returning(table) # Return the updated row(s)
+ )
+
+ # Execute the update statement
+ result = await session.execute(stmt)
+
+ # Commit the transaction
+ await session.commit()
+
+ return result
+ except Exception as e:
+ print(f"Error in record_updated_ticket method: {e}")
+ return None
+
+
+ async def update_data(self, data, col_name, table_name):
+ try:
+ table_class = self.get_class_by_tablename(table_name)
+
+ async with self.session() as session:
+ stmt = (
+ update(table_class)
+ .where(getattr(table_class, col_name) == data[col_name])
+ .values(**data)
+ .returning(table_class)
+ )
+
+ result = await session.execute(stmt)
+ await session.commit()
+
+ updated_record = result.scalars().first()
+ # Convert the updated record to a dictionary before returning
+ return self.convert_dict(updated_record) if updated_record else None
+
+ except Exception as e:
+ print(f"Error in update_data: {e}")
+ return None
+
+
+ async def update_pr_data(self, data, table_name):
+ try:
+ table_class = self.get_class_by_tablename(table_name)
+
+            async with self.session() as session:
+                stmt = (
+                    update(table_class)
+                    .where(table_class.pr_id == data['pr_id'])  # Match the existing row by pr_id
+                    .values(
+                        created_at=data['created_at'],
+                        api_url=data['api_url'],
+                        html_url=data['html_url'],
+                        raised_by=data['raised_by'],
+                        raised_at=data['raised_at'],
+                        raised_by_username=data['raised_by_username'],
+                        status=data['status'],
+                        is_merged=data['is_merged'],
+                        merged_by=data['merged_by'],
+                        merged_at=data['merged_at'],
+                        merged_by_username=data['merged_by_username']
+                    )
+                    .returning(table_class)  # Return the updated row(s)
+                )
+
+ # Execute the update statement
+ result = await session.execute(stmt)
+
+ # Commit the transaction
+ await session.commit()
+
+ # Optionally fetch the updated record(s)
+ updated_record = result.fetchone()
+
+ return updated_record if updated_record else None
+
+ except Exception as e:
+ print(f"Error in update_data: {e}")
+ return None
+
+
+ async def update_pr_history(self, pr_id, data):
+ try:
+ async with self.session() as session:
+ # Query for the existing record based on pr_id (or some unique identifier)
+ stmt = select(PrHistory).where(PrHistory.pr_id == pr_id)
+ result = await session.execute(stmt)
+ pr_history_record = result.scalars().first()
+
+ if pr_history_record:
+ # Update the fields with new values from data
+ pr_history_record.created_at = data['created_at']
+ pr_history_record.api_url = data['api_url']
+ pr_history_record.html_url = data['html_url']
+ pr_history_record.raised_by = data['raised_by']
+ pr_history_record.raised_at = data['raised_at']
+ pr_history_record.raised_by_username = data['raised_by_username']
+ pr_history_record.status = data['status']
+ pr_history_record.is_merged = data['is_merged']
+ pr_history_record.merged_by = data['merged_by']
+                    pr_history_record.merged_at = data['merged_at']
+ pr_history_record.merged_by_username = data['merged_by_username']
+ pr_history_record.ticket_url = data['ticket_url']
+ pr_history_record.ticket_complexity = data['ticket_complexity']
+
+ # Commit the changes to the database
+ await session.commit()
+
+ # Optionally refresh the object
+ await session.refresh(pr_history_record)
+
+ return pr_history_record
+ else:
+ print(f"Record with pr_id {pr_id} not found")
+ return None
+
+ except Exception as e:
+ print(f"Error in update_pr_history: {e}")
+ return None
+
+
+ async def addPr(self, prData, issue_id):
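+        # Link incoming PRs to their ticket (when issue_id is given) and store them in connected_prs.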
+ try:
+ if issue_id:
+ ticket = await self.get_data("issue_id","issues",issue_id,None)
+                if not ticket:
+ ticket = await self.get_data("issue_id","dmp_tickets",issue_id,None)
+
+ for pr in prData:
+ data = {
+ # "api_url":data["url"],
+ "html_url":pr["html_url"],
+ "pr_id":pr["pr_id"],
+ "raised_by":pr["raised_by"],
+ "raised_at":pr["raised_at"],
+ "raised_by_username":pr["raised_by_username"],
+ "status":pr["status"],
+ "is_merged":pr["is_merged"] if pr.get("is_merged") else None,
+ "merged_by":pr["merged_by"] if pr["merged_by"] else None,
+ "merged_by_username":pr["merged_by_username"] if pr.get("merged_by_username") else None,
+ "merged_at":pr["merged_at"] if pr.get("merged_at") else None,
+ "points": ticket[0]["ticket_points"] if issue_id else 0,
+ "ticket_url":ticket[0]["api_endpoint_url"] if issue_id else 0
+ }
+ resp = await self.add_data(data,"connected_prs")
+
+ return True
+ except Exception as e:
+ print(f"Error in addPr: {e}")
+ return None
+
+
+ async def get_issue_from_issue_id(self,issue_id):
+ try:
+ async with self.session() as session:
+ # Dynamically get the ORM class for the table
+ table = self.get_class_by_tablename("issues")
+
+                # Fetch the issue row matching this issue_id
+ stmt = select(table).where(table.issue_id == issue_id)
+ result = await session.execute(stmt)
+ issues = result.scalars().first()
+
+ if issues:
+ return self.convert_dict(issues)
+ return None
+
+ except Exception as e:
+ print(f"Error in get_issue_from_issue_id method: {e}")
+ return None
+
+ async def get_contributors_from_issue_id(self,issue_id):
+ try:
+ async with self.session() as session:
+ # Dynamically get the ORM class for the table
+ table = self.get_class_by_tablename("issue_contributors")
+
+                # Build and execute the query to fetch all contributors linked to the issue_id
+                stmt = select(table).where(table.issue_id == issue_id)
+                result = await session.execute(stmt)
+                contributors = result.scalars().all()
+
+                if contributors:
+                    return self.convert_dict(contributors)
+                return None
+
+ except Exception as e:
+ print(f"Error in get_contributors_from_issue_id method: {e}")
+ return None
+
+    async def get_pointsby_complexity(self, complexity_type, type="Contributor"):
+ try:
+ async with self.session() as session:
+ # Dynamically get the ORM class for the table
+ table = self.get_class_by_tablename("points_mapping")
+
+ # Build and execute the query with multiple conditions
+ stmt = select(table).where(
+ and_(
+ table.complexity == complexity_type,
+ table.role == type
+ )
+ )
+                result = await session.execute(stmt)
+                row = result.scalars().first()
+                return row.points if row else 0
+
+ except Exception as e:
+ print(f"Error in get_pointsby_complexity method: {e}")
+ return None
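+
+    # Usage sketch (hypothetical values): resolves the configured points for a
+    # complexity/role pair from points_mapping, falling back to 0 when unmapped:
+    #   points = await queries.get_pointsby_complexity("Medium", type="Mentor")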
+
+    async def upsert_point_transaction(self, issue_id, user_id, points, user_type="Contributor"):
+ try:
+ async with self.session() as session:
+ table = self.get_class_by_tablename("point_transactions")
+ column_map = {
+ "Contributor": table.user_id,
+ "Mentor": table.mentor_id,
+ }
+ chosen_column = column_map.get(user_type)
+ stmt = select(table).where(
+ and_(
+ table.issue_id == issue_id,
+ chosen_column == user_id
+ )
+ )
+
+ result = await session.execute(stmt)
+ transaction = result.scalars().one_or_none()
+
+                if transaction:
+                    # Record exists, so update the points column for the matched role column
+                    update_stmt = (
+                        update(table)
+                        .where(and_(table.issue_id == issue_id, chosen_column == user_id))
+                        .values(point=points)
+                    )
+                    await session.execute(update_stmt)
+                    await session.commit()
+                    return True
+
+ else:
+ # Record does not exist, so create a new one
+                    new_transaction = table(issue_id=issue_id, point=points)
+ setattr(new_transaction, chosen_column.key, user_id)
+ session.add(new_transaction)
+ await session.commit()
+ return True
+
+ except Exception as e:
+ print(f"Error in upsert_point_transaction method: {e}")
+ return None
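+
+    # Usage sketch (hypothetical ids): the first call inserts a point_transactions
+    # row; a later call for the same issue/user pair overwrites its point value:
+    #   await queries.upsert_point_transaction(123, 456, 10)
+    #   await queries.upsert_point_transaction(123, 456, 20, user_type="Contributor")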
+
+    async def save_user_points(self, user_id, points, user_type="Contributor"):
+ try:
+ async with self.session() as session:
+ table = self.get_class_by_tablename("user_points_mapping")
+ column_map = {
+ "Contributor": table.contributor,
+ "Mentor": table.mentor_id,
+ }
+ chosen_column = column_map.get(user_type)
+ stmt = select(table).where(chosen_column == user_id)
+
+ result = await session.execute(stmt)
+ transaction = result.scalars().one_or_none()
+
+ if transaction:
+ addon_points = points + transaction.points
+ update_stmt = (
+ update(table)
+ .where(chosen_column == user_id)
+ .values(points=addon_points)
+ )
+ await session.execute(update_stmt)
+ await session.commit()
+ return True
+
+ else:
+ # Record does not exist, so create a new one
+ new_transaction = table(points=points)
+ setattr(new_transaction, chosen_column.key, user_id)
+ session.add(new_transaction)
+ await session.commit()
+ return True
+
+ except Exception as e:
+ print(f"Error in save_user_points method: {e}")
+ return None
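+
+    # Behavioural note, sketched with hypothetical ids: unlike the upsert above,
+    # points here accumulate across calls instead of being overwritten:
+    #   await queries.save_user_points(456, 10)  # mapping row holds 10
+    #   await queries.save_user_points(456, 5)   # same row now holds 15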
+
+
+ async def deleteIssueComment(self, commentId):
+ try:
+ async with self.session() as session:
+ # Dynamically get the ORM class for the table
+ table = self.get_class_by_tablename("ticket_comments")
+
+                # Build and execute the delete for the matching comment id
+                stmt = delete(table).where(table.id == commentId)
+                result = await session.execute(stmt)
+                await session.commit()
+                # rowcount reports how many rows were removed; 0 means no match
+                return result.rowcount > 0
+ except Exception as e:
+ print(f"Error in deleting issue comments: {e}")
+ return None
+
+
+ async def getUserLeaderBoardData(self):
+ try:
+ async with self.session() as session:
+ orgs_alias = aliased(CommunityOrgs)
+ points_alias = aliased(PointSystem)
+
+ # Join the Issues table with the CommunityOrgs and PointSystem
+ stmt = (
+ select(Issues, orgs_alias, points_alias)
+ .join(orgs_alias, Issues.org_id == orgs_alias.id, isouter=True) # Left join with CommunityOrgs
+ .join(points_alias, Issues.complexity == points_alias.complexity, isouter=True) # Left join with PointSystem
+ )
+
+ # Execute the statement
+ result = await session.execute(stmt)
+
+ # Fetch all the results
+ records = result.all()
+
+ # Convert to dictionary format for readability (if needed)
+ return [
+ {
+ 'issue': issue.to_dict(),
+ 'community_org': org.to_dict() if org else None,
+ 'point_system': points.to_dict() if points else None
+ }
+ for issue, org, points in records
+ ]
+        except Exception as e:
+            print(f"Error in getUserLeaderBoardData: {e}")
+            return None
+
+
+ async def get_joined_data_with_filters(self, filters=None):
+ async with self.session() as session:
+ # Aliases for the tables
+ issues = aliased(Issues)
+ orgs = aliased(CommunityOrgs)
+ points = aliased(PointSystem)
+
+ # Base query with the join
+ query = select(
+ issues,
+ orgs,
+ points
+ ).join(
+ orgs, issues.org_id == orgs.id
+ ).join(
+ points, points.complexity == issues.complexity
+ )
+
+ # If dynamic filters are provided, apply them
+ if filters:
+ filter_conditions = []
+ for field, value in filters.items():
+ filter_conditions.append(getattr(issues, field) == value)
+
+ query = query.where(and_(*filter_conditions))
+
+ # Execute the query and return the results
+ result = await session.execute(query)
+ records = result.all()
+
+ # Convert results to dictionaries if necessary
+            return [
+                {
+                    'issue': record[0].to_dict(),
+                    'org': record[1].to_dict(),
+                    'points': record[2].to_dict()
+                }
+                for record in records
+            ]
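+
+    # Filters here target columns on the Issues table only; a sketch with a
+    # hypothetical column/value pair:
+    #   rows = await queries.get_joined_data_with_filters({"complexity": "Medium"})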
+
+ async def fetch_filtered_issues(self, filters):
+ try:
+ async with self.session() as session:
+ # Start building the query by joining tables
+ query = (
+ select(Issues, CommunityOrgs, PointSystem, IssueContributors, ContributorsRegistration)
+ .join(CommunityOrgs, Issues.org_id == CommunityOrgs.id)
+ .join(PointSystem, Issues.complexity == PointSystem.complexity)
+ .outerjoin(IssueContributors, Issues.id == IssueContributors.issue_id)
+ .outerjoin(ContributorsRegistration, IssueContributors.contributor_id == ContributorsRegistration.id)
+ .where(Issues.complexity != 'Beginner')
+ .order_by(desc(Issues.id))
+ )
+
+ # Prepare dynamic filter conditions
+ conditions = []
+
+ # Check if there are filters for Issues table
+ if 'issues' in filters:
+ for field, value in filters['issues'].items():
+ conditions.append(getattr(Issues, field) == value)
+
+ # Check if there are filters for CommunityOrgs table
+ if 'org' in filters:
+ for field, value in filters['org'].items():
+ conditions.append(getattr(CommunityOrgs, field) == value)
+
+ # Check if there are filters for PointSystem table
+ if 'points' in filters:
+ for field, value in filters['points'].items():
+ conditions.append(getattr(PointSystem, field) == value)
+
+ # Apply filters (if any) to the query
+ if conditions:
+ query = query.where(and_(*conditions))
+
+ # Execute the query and fetch results
+ result = await session.execute(query)
+ rows = result.fetchall()
+
+ # Process the result into a dictionary or a preferred format
+ data = []
+ for row in rows:
+ issue = row.Issues.to_dict()
+ org = row.CommunityOrgs.to_dict() if row.CommunityOrgs else None
+ point_system = row.PointSystem.to_dict()
+ contributors_registration = row.ContributorsRegistration.to_dict() if row.ContributorsRegistration else None
+ data.append({
+ 'issue': issue,
+ 'org': org,
+ 'points': point_system,
+ 'contributors_registration': contributors_registration
+ })
+
+ return data
+
+ except Exception as e:
+ print(f"Error in fetch_filtered_issues: {e}")
+ return None
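+
+    # The filters argument is a nested dict keyed by table ('issues', 'org',
+    # 'points'); a sketch with hypothetical values:
+    #   rows = await queries.fetch_filtered_issues({
+    #       "issues": {"complexity": "Medium"},
+    #       "org": {"name": "SomeOrg"},
+    #   })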
+
+
+    def add_github_user(self, user):
+        # Supabase-client upsert; PostgREST expects on_conflict as a comma-separated column string
+        data = self.client.table("contributors_registration").upsert(user, on_conflict="github_id,discord_id").execute()
+        return data.data
+
diff --git a/shared_migrations/migrations/README b/shared_migrations/migrations/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/shared_migrations/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/shared_migrations/migrations/__pycache__/env.cpython-310.pyc b/shared_migrations/migrations/__pycache__/env.cpython-310.pyc
new file mode 100644
index 0000000..146b573
Binary files /dev/null and b/shared_migrations/migrations/__pycache__/env.cpython-310.pyc differ
diff --git a/shared_migrations/migrations/env.py b/shared_migrations/migrations/env.py
new file mode 100644
index 0000000..7961305
--- /dev/null
+++ b/shared_migrations/migrations/env.py
@@ -0,0 +1,99 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+from db.models import shared_metadata, Base
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+from dotenv import load_dotenv
+import os
+
+load_dotenv()
+url = os.getenv("DATABASE_URL")
+if not url:
+    raise RuntimeError("DATABASE_URL is not set; cannot configure sqlalchemy.url")
+config.set_main_option("sqlalchemy.url", url)
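+# A sample .env entry (hypothetical credentials); engine_from_config builds a
+# synchronous Engine below, so a sync driver URL is assumed:
+#   DATABASE_URL=postgresql://user:password@localhost:5432/discord_bot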
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+ fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+# target_metadata = shared_metadata
+target_metadata = Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url,
+ target_metadata=target_metadata,
+ literal_binds=True,
+ dialect_opts={"paramstyle": "named"},
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online() -> None:
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ engine = engine_from_config(
+ config.get_section(config.config_ini_section), prefix='sqlalchemy.')
+
+ with engine.connect() as connection:
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata,
+ compare_type=True
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/shared_migrations/migrations/script.py.mako b/shared_migrations/migrations/script.py.mako
new file mode 100644
index 0000000..fbc4b07
--- /dev/null
+++ b/shared_migrations/migrations/script.py.mako
@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+ ${downgrades if downgrades else "pass"}
diff --git a/shared_migrations/migrations/versions/8d1e6a7e959a_initial_migration.py b/shared_migrations/migrations/versions/8d1e6a7e959a_initial_migration.py
new file mode 100644
index 0000000..db77404
--- /dev/null
+++ b/shared_migrations/migrations/versions/8d1e6a7e959a_initial_migration.py
@@ -0,0 +1,1723 @@
+"""Initial migration
+
+Revision ID: 8d1e6a7e959a
+Revises:
+Create Date: 2024-12-18 18:12:00.911503
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# The column types below reference the project's custom DateTime type, so the
+# models package must be importable when this revision runs
+import db.models
+
+# revision identifiers, used by Alembic.
+revision: str = '8d1e6a7e959a'
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('github_profile_data',
+ sa.Column('github_username', sa.String(), nullable=False),
+ sa.Column('discord_id', sa.BigInteger(), nullable=False),
+ sa.Column('classroom_points', sa.Integer(), nullable=False),
+ sa.Column('prs_raised', sa.Integer(), nullable=False),
+ sa.Column('prs_reviewed', sa.Integer(), nullable=False),
+ sa.Column('prs_merged', sa.Integer(), nullable=False),
+ sa.Column('dpg_points', sa.Integer(), nullable=False),
+ sa.Column('milestone', sa.Integer(), nullable=False),
+ sa.PrimaryKeyConstraint('github_username')
+ )
+ op.create_table('leaderboard',
+ sa.Column('discord_id', sa.BigInteger(), autoincrement=False, nullable=False),
+ sa.Column('github_id', sa.BigInteger(), nullable=False),
+ sa.Column('github_url', sa.Text(), nullable=False),
+ sa.Column('apprentice_badge', sa.Boolean(), nullable=True),
+ sa.Column('converser_badge', sa.Boolean(), nullable=False),
+ sa.Column('rockstar_badge', sa.Boolean(), nullable=False),
+ sa.Column('enthusiast_badge', sa.Boolean(), nullable=False),
+ sa.Column('rising_star_badge', sa.Boolean(), nullable=False),
+ sa.Column('github_x_discord_badge', sa.Boolean(), nullable=False),
+ sa.Column('points', sa.Integer(), nullable=False),
+ sa.Column('bronze_badge', sa.Boolean(), nullable=False),
+ sa.Column('silver_badge', sa.Boolean(), nullable=False),
+ sa.Column('gold_badge', sa.Boolean(), nullable=False),
+ sa.Column('ruby_badge', sa.Boolean(), nullable=False),
+ sa.Column('diamond_badge', sa.Boolean(), nullable=False),
+ sa.Column('certificate_link', sa.Text(), nullable=True),
+ sa.PrimaryKeyConstraint('discord_id')
+ )
+ op.create_table('role_master',
+ sa.Column('id', sa.BigInteger(), nullable=False),
+ sa.Column('created_at', db.models.DateTime(), nullable=False),
+ sa.Column('updated_at', db.models.DateTime(), nullable=True),
+ sa.Column('role', sa.Text(), nullable=True),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_table('unstructured_discord_data',
+ sa.Column('text', sa.Text(), nullable=True),
+ sa.Column('author', sa.BigInteger(), nullable=True),
+ sa.Column('channel', sa.BigInteger(), nullable=True),
+ sa.Column('channel_name', sa.Text(), nullable=True),
+ sa.Column('uuid', sa.String(length=36), nullable=False),
+ sa.Column('author_name', sa.Text(), nullable=True),
+ sa.Column('author_roles', sa.Text(), nullable=True),
+ sa.Column('sent_at', sa.Text(), nullable=True),
+ sa.PrimaryKeyConstraint('uuid')
+ )
+ op.create_table('user_points_mapping',
+ sa.Column('id', sa.UUID(), nullable=False),
+ sa.Column('contributor', sa.BigInteger(), nullable=True),
+ sa.Column('points', sa.Integer(), nullable=False),
+ sa.Column('level', sa.String(length=50), nullable=True),
+ sa.Column('created_at', db.models.DateTime(), nullable=False),
+ sa.Column('updated_at', db.models.DateTime(), nullable=False),
+ sa.Column('mentor_id', sa.BigInteger(), nullable=True),
+ sa.ForeignKeyConstraint(['contributor'], ['contributors_registration.id'], ),
+ sa.ForeignKeyConstraint(['mentor_id'], ['mentor_details.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.drop_table('__contributors_vc')
+ op.drop_table('__mentors')
+ op.drop_table('__mentorship_program_ticket_comments')
+ op.drop_table('__mentorship_program_pull_request')
+ op.drop_table('__mentorship_program_tickets')
+ op.drop_table('__community_program_unique_user_data')
+ op.drop_table('__contributors_discord')
+ op.drop_table('__applicant')
+ op.drop_table('__dashboard_config')
+ op.drop_table('__mentorship_program_projects')
+ op.drop_table('__comments')
+ op.drop_table('__dev_onboarding')
+ op.drop_table('contributors_registration_old')
+ op.drop_table('__pull_requests')
+ op.drop_table('__community_program_tickets')
+ op.drop_table('__community_organisations')
+ op.drop_table('__mentorship_program_selected_contributors')
+ op.drop_table('__community_program_product_wise_tickets')
+ op.drop_table('unstructured discord data')
+ op.alter_column('app_comments', 'id',
+ existing_type=sa.UUID(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text('gen_random_uuid()'))
+ op.alter_column('app_comments', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('app_comments', 'issue_id',
+ existing_type=sa.BIGINT(),
+ nullable=True)
+ op.alter_column('badges', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.UUID(),
+ existing_nullable=False)
+ op.alter_column('badges', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('badges', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('ccbp_tickets', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('ccbp_tickets', 'issue_id',
+ existing_type=sa.BIGINT(),
+ nullable=True)
+ op.alter_column('ccbp_tickets', 'index',
+ existing_type=sa.SMALLINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('ccbp_tickets', 'closed_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_comment='date-time at which issue was closed',
+ existing_nullable=True)
+ op.alter_column('chapters', 'org_name',
+ existing_type=sa.TEXT(),
+ nullable=True)
+ op.alter_column('chapters', 'discord_role_id',
+ existing_type=sa.BIGINT(),
+ nullable=True,
+ comment='db id of the corresponding member role in discord server',
+ existing_comment='db od of the corresponding member role in discord server')
+ op.alter_column('chapters', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('community_orgs', 'name',
+ existing_type=sa.TEXT(),
+ nullable=True)
+ op.alter_column('connected_prs', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('connected_prs', 'raised_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False)
+ op.alter_column('connected_prs', 'merged_at',
+ existing_type=postgresql.TIMESTAMP(),
+ type_=sa.Text(),
+ existing_nullable=True)
+ op.alter_column('contributor_names', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.add_column('contributor_points', sa.Column('contributors_id', sa.BigInteger(), nullable=True))
+ op.drop_constraint('contributor_points_contributors_id_fkey', 'contributor_points', type_='foreignkey')
+ op.create_foreign_key(None, 'contributor_points', 'contributors_registration', ['contributors_id'], ['id'])
+ op.drop_column('contributor_points', 'user_id')
+ op.alter_column('contributors_discord', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('contributors_discord', 'joined_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.drop_column('contributors_discord', 'city')
+ op.drop_column('contributors_discord', 'country')
+ op.drop_column('contributors_discord', 'experience')
+ op.alter_column('contributors_registration', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('contributors_registration', 'joined_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.drop_table_comment(
+ 'contributors_registration',
+ existing_comment='This is a duplicate of contributors_registration_old',
+ schema=None
+ )
+ op.add_column('discord_engagement', sa.Column('converserbadge', sa.Boolean(), nullable=True))
+ op.add_column('discord_engagement', sa.Column('apprenticebadge', sa.Boolean(), nullable=True))
+ op.add_column('discord_engagement', sa.Column('rockstarbadge', sa.Boolean(), nullable=True))
+ op.add_column('discord_engagement', sa.Column('enthusiastbadge', sa.Boolean(), nullable=True))
+ op.add_column('discord_engagement', sa.Column('risingstarbadge', sa.Boolean(), nullable=True))
+ op.alter_column('discord_engagement', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('discord_engagement', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.drop_column('discord_engagement', 'apprenticeBadge')
+ op.drop_column('discord_engagement', 'converserBadge')
+ op.drop_column('discord_engagement', 'risingStarBadge')
+ op.drop_column('discord_engagement', 'enthusiastBadge')
+ op.drop_column('discord_engagement', 'rockstarBadge')
+ op.alter_column('dmp_issue_updates', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('dmp_issue_updates', 'comment_updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.drop_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', type_='unique')
+ op.drop_constraint('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', type_='foreignkey')
+ op.create_foreign_key(None, 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id'])
+ op.alter_column('dmp_issues', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_constraint('dmp_issues_dmp_id_key', 'dmp_issues', type_='unique')
+ op.drop_constraint('dmp_issues_org_id_fkey', 'dmp_issues', type_='foreignkey')
+ op.create_foreign_key(None, 'dmp_issues', 'dmp_orgs', ['org_id'], ['id'])
+ op.drop_column('dmp_issues', 'repo_owner')
+ op.add_column('dmp_orgs', sa.Column('version', sa.Text(), nullable=True))
+ op.alter_column('dmp_orgs', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('dmp_orgs', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.drop_constraint('dmp_orgs_id_key', 'dmp_orgs', type_='unique')
+ op.alter_column('dmp_pr_updates', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('dmp_pr_updates', 'pr_updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('dmp_pr_updates', 'merged_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('dmp_pr_updates', 'closed_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.drop_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', type_='unique')
+ op.drop_constraint('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', type_='foreignkey')
+ op.create_foreign_key(None, 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id'])
+ op.alter_column('dmp_tickets', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('dmp_tickets', 'ticket_points',
+ existing_type=sa.SMALLINT(),
+ type_=sa.Integer(),
+ existing_comment='How many points the ticket is worth',
+ existing_nullable=True,
+ existing_server_default=sa.text("'0'::smallint"))
+ op.alter_column('dmp_tickets', 'index',
+ existing_type=sa.SMALLINT(),
+ server_default=None,
+ type_=sa.Integer(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('dmp_week_updates', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_constraint('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', type_='foreignkey')
+ op.alter_column('github_classroom_data', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('github_classroom_data', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('github_classroom_data', 'submission_timestamp',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False)
+ op.alter_column('github_classroom_data', 'points_awarded',
+ existing_type=sa.VARCHAR(),
+ type_=sa.Integer(),
+ existing_nullable=True)
+ op.alter_column('github_classroom_data', 'points_available',
+ existing_type=sa.VARCHAR(),
+ type_=sa.Integer(),
+ existing_nullable=True)
+ op.create_table_comment(
+ 'github_classroom_data',
+ 'Table for saving the details about github classroom assignment data',
+ existing_comment='Table for save the details about github classroom assignment datas',
+ schema=None
+ )
+ op.alter_column('github_installations', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('github_installations', 'github_ids',
+ existing_type=postgresql.JSON(astext_type=sa.Text()),
+ type_=sa.Text(),
+ comment="Identifiers on the github database, prolly won't be used",
+ existing_comment="identifiers on the github database, prolly won't be used",
+ existing_nullable=True)
+ op.alter_column('github_installations', 'permissions_and_events',
+ existing_type=postgresql.JSON(astext_type=sa.Text()),
+ type_=sa.Text(),
+ existing_nullable=True)
+ op.alter_column('github_installations', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.drop_constraint('github_installations_organisation_fkey', 'github_installations', type_='foreignkey')
+ op.create_foreign_key(None, 'github_installations', 'community_orgs', ['organisation'], ['name'])
+ op.alter_column('github_organisations_to_organisations', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('github_organisations_to_organisations', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ comment='Creation date of organization ticket',
+ existing_comment='creation date of organization ticket',
+ existing_nullable=True)
+ op.alter_column('issue_contributors', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('issue_contributors', 'contributor_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ nullable=True)
+ op.alter_column('issue_contributors', 'issue_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False)
+ op.alter_column('issue_contributors', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('issue_contributors', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.drop_constraint('unique_issue_id_contributors', 'issue_contributors', type_='unique')
+ op.drop_constraint('issue_contributors_contributor_id_fkey', 'issue_contributors', type_='foreignkey')
+ op.create_foreign_key(None, 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id'])
+ op.create_foreign_key(None, 'issue_contributors', 'role_master', ['role'], ['id'])
+ op.alter_column('issue_mentors', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"))
+ op.alter_column('issue_mentors', 'issue_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ nullable=True)
+ op.alter_column('issue_mentors', 'angel_mentor_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=True)
+ op.drop_constraint('unique_issue_id_mentors', 'issue_mentors', type_='unique')
+ op.alter_column('issues', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('issues_id_seq'::regclass)"))
+ op.drop_constraint('issues_org_id_fkey', 'issues', type_='foreignkey')
+ op.create_foreign_key(None, 'issues', 'community_orgs', ['org_id'], ['id'])
+ op.alter_column('mentor_details', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)"))
+ op.alter_column('mentor_not_added', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.add_column('mentorship_program_site_structure', sa.Column('product_id', sa.BigInteger(), nullable=True))
+ op.add_column('mentorship_program_site_structure', sa.Column('project_id', sa.BigInteger(), nullable=True))
+ op.add_column('mentorship_program_site_structure', sa.Column('contributor_id', sa.BigInteger(), nullable=True))
+ op.add_column('mentorship_program_site_structure', sa.Column('website_directory_label', sa.Text(), nullable=True))
+ op.alter_column('mentorship_program_site_structure', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_constraint('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', type_='foreignkey')
+ op.drop_constraint('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', type_='foreignkey')
+ op.drop_constraint('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', type_='foreignkey')
+ op.create_foreign_key(None, 'mentorship_program_site_structure', 'product', ['product_id'], ['id'])
+ op.drop_table_comment(
+ 'mentorship_program_site_structure',
+ existing_comment='a mapping for the milestones website structure',
+ schema=None
+ )
+ op.drop_column('mentorship_program_site_structure', 'project')
+ op.drop_column('mentorship_program_site_structure', 'product')
+ op.drop_column('mentorship_program_site_structure', 'website directory_label')
+ op.drop_column('mentorship_program_site_structure', 'contributor')
+ op.alter_column('mentorship_program_website_comments', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_comments', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_commits', 'date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_commits', 'files',
+ existing_type=postgresql.JSON(astext_type=sa.Text()),
+ type_=sa.Text(),
+ existing_nullable=True)
+ op.add_column('mentorship_program_website_has_updated', sa.Column('project_id', sa.BigInteger(), nullable=True))
+ op.alter_column('mentorship_program_website_has_updated', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week1_update_date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week2_update_date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week3_update_date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week4_update_date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week5_update_date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week6_update_date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week7_update_date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week8_update_date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week9_update_date',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.drop_constraint('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', type_='foreignkey')
+ op.drop_column('mentorship_program_website_has_updated', 'project')
+ op.alter_column('point_system', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('point_transactions', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('point_transactions', 'user_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=True)
+ op.alter_column('point_transactions', 'issue_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False)
+ op.alter_column('point_transactions', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ nullable=False,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('point_transactions', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ nullable=False,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('point_transactions', 'angel_mentor_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=True)
+ op.drop_constraint('point_transactions_user_id_fkey', 'point_transactions', type_='foreignkey')
+ op.create_foreign_key(None, 'point_transactions', 'mentor_details', ['angel_mentor_id'], ['id'])
+ op.create_foreign_key(None, 'point_transactions', 'contributors_registration', ['user_id'], ['id'])
+ op.alter_column('points_mapping', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('points_mapping', 'role',
+ existing_type=sa.TEXT(),
+ type_=sa.String(length=50),
+ nullable=False)
+ op.alter_column('points_mapping', 'complexity',
+ existing_type=sa.TEXT(),
+ type_=sa.String(length=50),
+ nullable=False)
+ op.alter_column('points_mapping', 'points',
+ existing_type=sa.INTEGER(),
+ nullable=False)
+ op.alter_column('points_mapping', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('points_mapping', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('pr_history', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)"))
+ op.alter_column('pr_history', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('pr_history', 'raised_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False)
+ op.alter_column('pr_history', 'pr_id',
+ existing_type=sa.BIGINT(),
+ comment=None,
+ existing_comment='github id of the pr',
+ existing_nullable=False)
+ op.drop_table_comment(
+ 'pr_history',
+ existing_comment='Holds records of pr webhooks',
+ schema=None
+ )
+ op.drop_column('pr_history', 'points')
+ op.alter_column('pr_staging', 'id',
+ existing_type=sa.UUID(),
+ type_=sa.String(length=36),
+ existing_nullable=False,
+ existing_server_default=sa.text('gen_random_uuid()'))
+ op.alter_column('pr_staging', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('pr_staging', 'raised_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False)
+ op.alter_column('pr_staging', 'pr_id',
+ existing_type=sa.BIGINT(),
+ comment=None,
+ existing_comment='github id of the pr',
+ existing_nullable=False)
+ op.drop_table_comment(
+ 'pr_staging',
+ existing_comment='This is a duplicate of connected_prs',
+ schema=None
+ )
+ op.add_column('product', sa.Column('channel_id', sa.BigInteger(), nullable=True))
+ op.alter_column('product', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('product', 'description',
+ existing_type=sa.TEXT(),
+ comment=None,
+ existing_comment='URL to the product entry on C4GT wiki',
+ existing_nullable=True,
+ existing_server_default=sa.text("''::text"))
+ op.drop_constraint('product_channel_fkey', 'product', type_='foreignkey')
+ op.create_foreign_key(None, 'product', 'discord_channels', ['channel_id'], ['channel_id'])
+ op.drop_table_comment(
+ 'product',
+ existing_comment="A table containing all 'Products' in C4GT 2023",
+ schema=None
+ )
+ op.drop_column('product', 'channel')
+ op.alter_column('ticket_comments', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('ticket_comments', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True)
+ op.alter_column('unlisted_tickets', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('unlisted_tickets', 'ticket_points',
+ existing_type=sa.SMALLINT(),
+ comment=None,
+ existing_comment='How many points the ticket is worth',
+ existing_nullable=True,
+ existing_server_default=sa.text("'0'::smallint"))
+ op.alter_column('unlisted_tickets', 'index',
+ existing_type=sa.SMALLINT(),
+ server_default=None,
+ existing_nullable=False)
+ op.alter_column('unlisted_tickets', 'uuid',
+ existing_type=sa.UUID(),
+ type_=sa.String(length=36),
+ existing_nullable=False,
+ existing_server_default=sa.text('gen_random_uuid()'))
+ op.create_unique_constraint(None, 'unlisted_tickets', ['uuid', 'issue_id'])
+ op.add_column('user_activity', sa.Column('contributor_id', sa.BigInteger(), nullable=False))
+ op.alter_column('user_activity', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('user_activity', 'issue_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False)
+ op.alter_column('user_activity', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_activity', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_activity', 'mentor_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=True)
+ op.drop_constraint('user_activity_user_id_fkey', 'user_activity', type_='foreignkey')
+ op.drop_constraint('user_activity_mentor_id_fkey', 'user_activity', type_='foreignkey')
+ op.create_foreign_key(None, 'user_activity', 'contributors_registration', ['contributor_id'], ['id'])
+ op.create_foreign_key(None, 'user_activity', 'mentor_details', ['mentor_id'], ['id'])
+ op.drop_column('user_activity', 'user_id')
+ op.alter_column('user_badges', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.UUID(),
+ existing_nullable=False)
+ op.alter_column('user_badges', 'user_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False)
+ op.alter_column('user_badges', 'badge_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False)
+ op.alter_column('user_badges', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_badges', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_certificates', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.UUID(),
+ existing_nullable=False)
+ op.alter_column('user_certificates', 'user_id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False)
+ op.alter_column('user_certificates', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_certificates', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('users', 'id',
+ existing_type=sa.INTEGER(),
+ type_=sa.BigInteger(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('users_id_seq'::regclass)"))
+ op.alter_column('users', 'name',
+ existing_type=sa.TEXT(),
+ nullable=True)
+ op.alter_column('users', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('users', 'updated_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.create_unique_constraint(None, 'users', ['discord'])
+ op.alter_column('vc_logs', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=None,
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('vc_logs', 'created_at',
+ existing_type=postgresql.TIMESTAMP(timezone=True),
+ type_=db.models.DateTime(),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.alter_column('vc_logs', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('vc_logs', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_constraint(None, 'users', type_='unique')
+ op.alter_column('users', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('users', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('users', 'name',
+ existing_type=sa.TEXT(),
+ nullable=False)
+ op.alter_column('users', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('users_id_seq'::regclass)"))
+ op.alter_column('user_certificates', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_certificates', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_certificates', 'user_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False)
+ op.alter_column('user_certificates', 'id',
+ existing_type=sa.UUID(),
+ type_=sa.INTEGER(),
+ existing_nullable=False)
+ op.alter_column('user_badges', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_badges', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_badges', 'badge_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False)
+ op.alter_column('user_badges', 'user_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False)
+ op.alter_column('user_badges', 'id',
+ existing_type=sa.UUID(),
+ type_=sa.INTEGER(),
+ existing_nullable=False)
+ op.add_column('user_activity', sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False))
+ op.drop_constraint(None, 'user_activity', type_='foreignkey')
+ op.drop_constraint(None, 'user_activity', type_='foreignkey')
+ op.create_foreign_key('user_activity_mentor_id_fkey', 'user_activity', 'mentor_details', ['mentor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+ op.create_foreign_key('user_activity_user_id_fkey', 'user_activity', 'users', ['user_id'], ['id'])
+ op.alter_column('user_activity', 'mentor_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=True)
+ op.alter_column('user_activity', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_activity', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('user_activity', 'issue_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False)
+ op.alter_column('user_activity', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_column('user_activity', 'contributor_id')
+ op.drop_constraint(None, 'unlisted_tickets', type_='unique')
+ op.alter_column('unlisted_tickets', 'uuid',
+ existing_type=sa.String(length=36),
+ type_=sa.UUID(),
+ existing_nullable=False,
+ existing_server_default=sa.text('gen_random_uuid()'))
+ op.alter_column('unlisted_tickets', 'index',
+ existing_type=sa.SMALLINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1),
+ existing_nullable=False)
+ op.alter_column('unlisted_tickets', 'ticket_points',
+ existing_type=sa.SMALLINT(),
+ comment='How many points the ticket is worth',
+ existing_nullable=True,
+ existing_server_default=sa.text("'0'::smallint"))
+ op.alter_column('unlisted_tickets', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('ticket_comments', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('ticket_comments', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.add_column('product', sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True))
+ op.create_table_comment(
+ 'product',
+ "A table containing all 'Products' in C4GT 2023",
+ existing_comment=None,
+ schema=None
+ )
+ op.drop_constraint(None, 'product', type_='foreignkey')
+ op.create_foreign_key('product_channel_fkey', 'product', 'discord_channels', ['channel'], ['channel_id'])
+ op.alter_column('product', 'description',
+ existing_type=sa.TEXT(),
+ comment='URL to the product entry on C4GT wiki',
+ existing_nullable=True,
+ existing_server_default=sa.text("''::text"))
+ op.alter_column('product', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_column('product', 'channel_id')
+ op.create_table_comment(
+ 'pr_staging',
+ 'This is a duplicate of connected_prs',
+ existing_comment=None,
+ schema=None
+ )
+ op.alter_column('pr_staging', 'pr_id',
+ existing_type=sa.BIGINT(),
+ comment='github id of the pr',
+ existing_nullable=False)
+ op.alter_column('pr_staging', 'raised_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False)
+ op.alter_column('pr_staging', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('pr_staging', 'id',
+ existing_type=sa.String(length=36),
+ type_=sa.UUID(),
+ existing_nullable=False,
+ existing_server_default=sa.text('gen_random_uuid()'))
+ op.add_column('pr_history', sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False))
+ op.create_table_comment(
+ 'pr_history',
+ 'Holds records of pr webhooks',
+ existing_comment=None,
+ schema=None
+ )
+ op.alter_column('pr_history', 'pr_id',
+ existing_type=sa.BIGINT(),
+ comment='github id of the pr',
+ existing_nullable=False)
+ op.alter_column('pr_history', 'raised_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False)
+ op.alter_column('pr_history', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('pr_history', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('pr_history_id_seq'::regclass)"))
+ op.alter_column('points_mapping', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('points_mapping', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('points_mapping', 'points',
+ existing_type=sa.INTEGER(),
+ nullable=True)
+ op.alter_column('points_mapping', 'complexity',
+ existing_type=sa.String(length=50),
+ type_=sa.TEXT(),
+ nullable=True)
+ op.alter_column('points_mapping', 'role',
+ existing_type=sa.String(length=50),
+ type_=sa.TEXT(),
+ nullable=True)
+ op.alter_column('points_mapping', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_constraint(None, 'point_transactions', type_='foreignkey')
+ op.drop_constraint(None, 'point_transactions', type_='foreignkey')
+ op.create_foreign_key('point_transactions_user_id_fkey', 'point_transactions', 'contributors_registration', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+ op.alter_column('point_transactions', 'angel_mentor_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=True)
+ op.alter_column('point_transactions', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('point_transactions', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('point_transactions', 'issue_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False)
+ op.alter_column('point_transactions', 'user_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=True)
+ op.alter_column('point_transactions', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('point_system', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.add_column('mentorship_program_website_has_updated', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True))
+ op.create_foreign_key('mentorship_program_website_has_updated_project_fkey', 'mentorship_program_website_has_updated', '__mentorship_program_projects', ['project'], ['name'])
+ op.alter_column('mentorship_program_website_has_updated', 'week9_update_date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week8_update_date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week7_update_date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week6_update_date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week5_update_date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week4_update_date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week3_update_date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week2_update_date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'week1_update_date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_has_updated', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_column('mentorship_program_website_has_updated', 'project_id')
+ op.alter_column('mentorship_program_website_commits', 'files',
+ existing_type=sa.Text(),
+ type_=postgresql.JSON(astext_type=sa.Text()),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_commits', 'date',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_comments', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('mentorship_program_website_comments', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
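+ # mentorship_program_site_structure: restore the legacy name-based text
+ # columns, including 'website directory_label' (the space is part of the
+ # original column name), and re-point the FKs at name columns instead of ids.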
+ op.add_column('mentorship_program_site_structure', sa.Column('contributor', sa.TEXT(), autoincrement=False, nullable=True))
+ op.add_column('mentorship_program_site_structure', sa.Column('website directory_label', sa.TEXT(), autoincrement=False, nullable=True))
+ op.add_column('mentorship_program_site_structure', sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True))
+ op.add_column('mentorship_program_site_structure', sa.Column('project', sa.TEXT(), autoincrement=False, nullable=True))
+ op.create_table_comment(
+ 'mentorship_program_site_structure',
+ 'a mapping for the milestones website structure',
+ existing_comment=None,
+ schema=None
+ )
+ op.drop_constraint(None, 'mentorship_program_site_structure', type_='foreignkey')
+ op.create_foreign_key('mentorship_program_site_structure_contributor_fkey', 'mentorship_program_site_structure', '__mentorship_program_selected_contributors', ['contributor'], ['name'])
+ op.create_foreign_key('mentorship_program_site_structure_product_fkey', 'mentorship_program_site_structure', 'product', ['product'], ['name'])
+ op.create_foreign_key('mentorship_program_site_structure_project_fkey', 'mentorship_program_site_structure', '__mentorship_program_projects', ['project'], ['name'])
+ op.alter_column('mentorship_program_site_structure', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_column('mentorship_program_site_structure', 'website_directory_label')
+ op.drop_column('mentorship_program_site_structure', 'contributor_id')
+ op.drop_column('mentorship_program_site_structure', 'project_id')
+ op.drop_column('mentorship_program_site_structure', 'product_id')
+ op.alter_column('mentor_not_added', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=True, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('mentor_details', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('mentor_details_id_seq'::regclass)"))
+ op.drop_constraint(None, 'issues', type_='foreignkey')
+ op.create_foreign_key('issues_org_id_fkey', 'issues', 'community_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL')
+ op.alter_column('issues', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('issues_id_seq'::regclass)"))
+ op.create_unique_constraint('unique_issue_id_mentors', 'issue_mentors', ['issue_id'])
+ op.alter_column('issue_mentors', 'angel_mentor_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=True)
+ op.alter_column('issue_mentors', 'issue_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ nullable=False)
+ op.alter_column('issue_mentors', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text("nextval('issue_mentors_id_seq'::regclass)"))
+ op.drop_constraint(None, 'issue_contributors', type_='foreignkey')
+ op.drop_constraint(None, 'issue_contributors', type_='foreignkey')
+ op.create_foreign_key('issue_contributors_contributor_id_fkey', 'issue_contributors', 'contributors_registration', ['contributor_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+ op.create_unique_constraint('unique_issue_id_contributors', 'issue_contributors', ['issue_id'])
+ op.alter_column('issue_contributors', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('issue_contributors', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('issue_contributors', 'issue_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False)
+ op.alter_column('issue_contributors', 'contributor_id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ nullable=False)
+ op.alter_column('issue_contributors', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.INTEGER(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('github_organisations_to_organisations', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ comment='creation date of organization ticket',
+ existing_comment='Creation date of organization ticket',
+ existing_nullable=True)
+ op.alter_column('github_organisations_to_organisations', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_constraint(None, 'github_installations', type_='foreignkey')
+ op.create_foreign_key('github_installations_organisation_fkey', 'github_installations', '__community_organisations', ['organisation'], ['name'], onupdate='CASCADE')
+ op.alter_column('github_installations', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('github_installations', 'permissions_and_events',
+ existing_type=sa.Text(),
+ type_=postgresql.JSON(astext_type=sa.Text()),
+ existing_nullable=True)
+ op.alter_column('github_installations', 'github_ids',
+ existing_type=sa.Text(),
+ type_=postgresql.JSON(astext_type=sa.Text()),
+ comment="identifiers on the github database, prolly won't be used",
+ existing_comment="Identifiers on the github database, prolly won't be used",
+ existing_nullable=True)
+ op.alter_column('github_installations', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.create_table_comment(
+ 'github_classroom_data',
+ 'Table for saving the details about github classroom assignment data',
+ existing_comment='Table for saving the details about github classroom assignment data',
+ schema=None
+ )
+ op.alter_column('github_classroom_data', 'points_available',
+ existing_type=sa.Integer(),
+ type_=sa.VARCHAR(),
+ existing_nullable=True)
+ op.alter_column('github_classroom_data', 'points_awarded',
+ existing_type=sa.Integer(),
+ type_=sa.VARCHAR(),
+ existing_nullable=True)
+ op.alter_column('github_classroom_data', 'submission_timestamp',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False)
+ op.alter_column('github_classroom_data', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('github_classroom_data', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.create_foreign_key('dmp_week_updates_dmp_id_fkey', 'dmp_week_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+ op.alter_column('dmp_week_updates', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('dmp_tickets', 'index',
+ existing_type=sa.Integer(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1),
+ type_=sa.SMALLINT(),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('dmp_tickets', 'ticket_points',
+ existing_type=sa.Integer(),
+ type_=sa.SMALLINT(),
+ existing_comment='How many points the ticket is worth',
+ existing_nullable=True,
+ existing_server_default=sa.text("'0'::smallint"))
+ op.alter_column('dmp_tickets', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.drop_constraint(None, 'dmp_pr_updates', type_='foreignkey')
+ op.create_foreign_key('dmp_pr_updates_dmp_id_fkey', 'dmp_pr_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+ op.create_unique_constraint('dmp_pr_updates_pr_id_key', 'dmp_pr_updates', ['pr_id'])
+ op.alter_column('dmp_pr_updates', 'closed_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('dmp_pr_updates', 'merged_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('dmp_pr_updates', 'pr_updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('dmp_pr_updates', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.create_unique_constraint('dmp_orgs_id_key', 'dmp_orgs', ['id'])
+ op.alter_column('dmp_orgs', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('dmp_orgs', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_column('dmp_orgs', 'version')
+ op.add_column('dmp_issues', sa.Column('repo_owner', sa.TEXT(), autoincrement=False, nullable=True))
+ op.drop_constraint(None, 'dmp_issues', type_='foreignkey')
+ op.create_foreign_key('dmp_issues_org_id_fkey', 'dmp_issues', 'dmp_orgs', ['org_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+ op.create_unique_constraint('dmp_issues_dmp_id_key', 'dmp_issues', ['id'])
+ op.alter_column('dmp_issues', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_constraint(None, 'dmp_issue_updates', type_='foreignkey')
+ op.create_foreign_key('dmp_issue_updates_dmp_id_fkey', 'dmp_issue_updates', 'dmp_issues', ['dmp_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
+ op.create_unique_constraint('dmp_issue_updates_comment_id_key', 'dmp_issue_updates', ['comment_id'])
+ op.alter_column('dmp_issue_updates', 'comment_updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('dmp_issue_updates', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
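+ # discord_engagement: bring back the original camelCase badge columns and drop
+ # the lowercase variants; as noted above, the badge values are not copied.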
+ op.add_column('discord_engagement', sa.Column('rockstarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+ op.add_column('discord_engagement', sa.Column('enthusiastBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+ op.add_column('discord_engagement', sa.Column('risingStarBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+ op.add_column('discord_engagement', sa.Column('converserBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+ op.add_column('discord_engagement', sa.Column('apprenticeBadge', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+ op.alter_column('discord_engagement', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('discord_engagement', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.drop_column('discord_engagement', 'risingstarbadge')
+ op.drop_column('discord_engagement', 'enthusiastbadge')
+ op.drop_column('discord_engagement', 'rockstarbadge')
+ op.drop_column('discord_engagement', 'apprenticebadge')
+ op.drop_column('discord_engagement', 'converserbadge')
+ op.create_table_comment(
+ 'contributors_registration',
+ 'This is a duplicate of contributors_registration_old',
+ existing_comment=None,
+ schema=None
+ )
+ op.alter_column('contributors_registration', 'joined_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('contributors_registration', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.add_column('contributors_discord', sa.Column('experience', sa.TEXT(), autoincrement=False, nullable=True))
+ op.add_column('contributors_discord', sa.Column('country', sa.TEXT(), autoincrement=False, nullable=True))
+ op.add_column('contributors_discord', sa.Column('city', sa.TEXT(), autoincrement=False, nullable=True))
+ op.alter_column('contributors_discord', 'joined_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('contributors_discord', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.add_column('contributor_points', sa.Column('user_id', sa.BIGINT(), autoincrement=False, nullable=True))
+ op.drop_constraint(None, 'contributor_points', type_='foreignkey')
+ op.create_foreign_key('contributor_points_contributors_id_fkey', 'contributor_points', 'contributors_registration', ['user_id'], ['id'])
+ op.drop_column('contributor_points', 'contributors_id')
+ op.alter_column('contributor_names', 'id',
+ existing_type=sa.BIGINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
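+ # connected_prs.merged_at goes back from text to timestamp below; Postgres
+ # cannot cast text to timestamp automatically in ALTER COLUMN, so this alter
+ # needs a USING clause (Alembic's postgresql_using='merged_at::timestamp'
+ # keyword is one way to supply it).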
+ op.alter_column('connected_prs', 'merged_at',
+ existing_type=sa.Text(),
+ type_=postgresql.TIMESTAMP(),
+ existing_nullable=True)
+ op.alter_column('connected_prs', 'raised_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=False)
+ op.alter_column('connected_prs', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('community_orgs', 'name',
+ existing_type=sa.TEXT(),
+ nullable=False)
+ op.alter_column('chapters', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True)
+ op.alter_column('chapters', 'discord_role_id',
+ existing_type=sa.BIGINT(),
+ nullable=False,
+ existing_comment='db id of the corresponding member role in discord server')
+ op.alter_column('chapters', 'org_name',
+ existing_type=sa.TEXT(),
+ nullable=False)
+ op.alter_column('ccbp_tickets', 'closed_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_comment='date-time at which issue was closed',
+ existing_nullable=True)
+ op.alter_column('ccbp_tickets', 'index',
+ existing_type=sa.SMALLINT(),
+ server_default=sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=32767, cycle=False, cache=1),
+ existing_nullable=False,
+ autoincrement=True)
+ op.alter_column('ccbp_tickets', 'issue_id',
+ existing_type=sa.BIGINT(),
+ nullable=False)
+ op.alter_column('ccbp_tickets', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('badges', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
+ op.alter_column('badges', 'created_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('CURRENT_TIMESTAMP'))
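+ # badges.id: UUID -> INTEGER has no cast in Postgres, and the reverse
+ # BigInteger -> UUID change on app_comments.id further down is equally
+ # uncastable; both likely need a table rebuild rather than a plain ALTER.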
+ op.alter_column('badges', 'id',
+ existing_type=sa.UUID(),
+ type_=sa.INTEGER(),
+ existing_nullable=False)
+ op.alter_column('app_comments', 'issue_id',
+ existing_type=sa.BIGINT(),
+ nullable=False)
+ op.alter_column('app_comments', 'updated_at',
+ existing_type=db.models.DateTime(),
+ type_=postgresql.TIMESTAMP(timezone=True),
+ existing_nullable=True,
+ existing_server_default=sa.text('now()'))
+ op.alter_column('app_comments', 'id',
+ existing_type=sa.BigInteger(),
+ type_=sa.UUID(),
+ existing_nullable=False,
+ autoincrement=True,
+ existing_server_default=sa.text('gen_random_uuid()'))
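+ # The create_table calls below restore the legacy tables dropped by upgrade();
+ # the '__' prefix appears to mark archived copies, and quirky identifiers such
+ # as the spaces in 'unstructured discord data' and 'dashboard _config_pkey'
+ # are reproduced verbatim so the downgrade matches the original schema.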
+ op.create_table('unstructured discord data',
+ sa.Column('text', sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column('author', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('channel', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('channel_name', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('uuid', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+ sa.Column('author_name', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('author_roles', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+ sa.Column('sent_at', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('uuid', name='unstructured discord data_duplicate_pkey'),
+ sa.UniqueConstraint('uuid', name='unstructured discord data_duplicate_uuid_key')
+ )
+ op.create_table('__community_program_product_wise_tickets',
+ sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+ sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('gh_organisation', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('repository name', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='community_program_tickets_duplicate_pkey')
+ )
+ op.create_table('__mentorship_program_selected_contributors',
+ sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+ sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('project_name', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='mentorship_program_selected_contributors_pkey'),
+ sa.UniqueConstraint('name', name='mentorship_program_selected_contributors_name_key'),
+ comment='List of contributors selected for C4GT Mentorship Program 2023'
+ )
+ op.create_table('__community_organisations',
+ sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+ sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='organisations_pkey'),
+ sa.UniqueConstraint('name', name='organisations_name_key'),
+ postgresql_ignore_search_path=False
+ )
+ op.create_table('__community_program_tickets',
+ sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('community_label', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='community_program_tickets_pkey')
+ )
+ op.create_table('__pull_requests',
+ sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+ sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
+ sa.Column('api_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('raised_by', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('raised_at', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('is_merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+ sa.Column('merged_by', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('merged_at', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('pr_id', sa.BIGINT(), autoincrement=False, nullable=False, comment='github id of the pr'),
+ sa.Column('points', sa.SMALLINT(), server_default=sa.text("'10'::smallint"), autoincrement=False, nullable=False),
+ sa.Column('ticket_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='pull_requests_pkey1'),
+ sa.UniqueConstraint('html_url', name='pull_requests_html_url_key'),
+ sa.UniqueConstraint('pr_id', name='pull_requests_pr_id_key')
+ )
+ op.create_table('contributors_registration_old',
+ sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+ sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=False),
+ sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
+ sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='contributors_registration_duplicate_pkey'),
+ sa.UniqueConstraint('discord_id', name='contributors_registration_duplicate_discord_id_key'),
+ sa.UniqueConstraint('github_id', name='contributors_registration_duplicate_github_id_key')
+ )
+ op.create_table('__dev_onboarding',
+ sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+ sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=False),
+ sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('repos', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='Onboarding_Dev_pkey'),
+ sa.UniqueConstraint('organisation', name='Onboarding_Dev_org_key')
+ )
+ op.create_table('__comments',
+ sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+ sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+ sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='comments_pkey')
+ )
+ op.create_table('__mentorship_program_projects',
+ sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+ sa.Column('name', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('repository', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('product', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('issue_page_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('isssue_api_url', sa.TEXT(), autoincrement=False, nullable=True),
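+ # 'isssue_api_url' above (triple s) looks like a typo, but it is kept as-is
+ # so the restored table matches the legacy column name.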
+ sa.Column('repository_api_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.ForeignKeyConstraint(['product'], ['product.name'], name='__mentorship_program_projects_product_fkey', ondelete='SET DEFAULT'),
+ sa.PrimaryKeyConstraint('id', name='projects_pkey'),
+ sa.UniqueConstraint('name', name='projects_name_key'),
+ comment='Selected projects under C4GT 2023'
+ )
+ op.create_table('__dashboard_config',
+ sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+ sa.Column('dashboard', sa.TEXT(), autoincrement=False, nullable=False),
+ sa.Column('starting date', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='dashboard _config_pkey')
+ )
+ op.create_table('__applicant',
+ sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
+ sa.Column('sheet_username', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='applicant_pkey'),
+ sa.UniqueConstraint('discord_id', name='applicant_discord_id_key')
+ )
+ op.create_table('__contributors_discord',
+ sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+ sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column('joined_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False),
+ sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column(' name', sa.TEXT(), autoincrement=False, nullable=True),
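+ # ' name' above keeps its leading space to match the legacy column name;
+ # any raw SQL must quote it as " name".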
+ sa.Column('chapter', sa.TEXT(), autoincrement=False, nullable=True, comment="the chapter they're associated with"),
+ sa.Column('gender', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='__contributors_pkey'),
+ sa.UniqueConstraint('discord_id', name='__contributors_discord_id_key')
+ )
+ op.create_table('__community_program_unique_user_data',
+ sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+ sa.Column('ticket_name', sa.TEXT(), autoincrement=False, nullable=False),
+ sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('linked_pr', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('linked_pr_author_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('linked_pr_author_username', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('is_registered', sa.BOOLEAN(), autoincrement=False, nullable=True),
+ sa.Column('ticket_link', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('linked_pr_link', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+ sa.Column('state', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='community_program_unique_user_data_pkey')
+ )
+ op.create_table('__mentorship_program_tickets',
+ sa.Column('url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('repository_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('comments_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('events_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('html_url', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('node_id', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('raised_by_username', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('raised_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('labels', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('status', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
+ sa.Column('assignees', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='mentorship_program_tickets_pkey')
+ )
+ op.create_table('__mentorship_program_pull_request',
+ sa.Column('pr_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('pr_id', sa.INTEGER(), autoincrement=False, nullable=False),
+ sa.Column('pr_node_id', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('status', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('title', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('raised_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('raised_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('closed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('merged_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column('assignees', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+ sa.Column('requested_reviewers', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+ sa.Column('labels', postgresql.ARRAY(sa.TEXT()), autoincrement=False, nullable=True),
+ sa.Column('review_comments_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('comments_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('repository_owner_name', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('repository_owner_id', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('repository_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('merged', sa.BOOLEAN(), autoincrement=False, nullable=True),
+ sa.Column('number_of_commits', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('number_of_comments', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('lines_of_code_added', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('lines_of_code_removed', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('number_of_files_changed', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('merged_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('merged_by_username', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('linked_ticket', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('pr_id', name='mentorship_program_pull_request_pkey')
+ )
+ op.create_table('__mentorship_program_ticket_comments',
+ sa.Column('id', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('html_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('issue_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('node_id', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('commented_by', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('commented_by_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+ sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
+ sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('reactions_url', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id', name='mentorship_program_ticket_comments_pkey')
+ )
+ op.create_table('__mentors',
+ sa.Column('id', sa.BIGINT(), sa.Identity(always=False, start=1, increment=1, minvalue=1, maxvalue=9223372036854775807, cycle=False, cache=1), autoincrement=True, nullable=False),
+ sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('github_id', sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column('github_url', sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column('discord_username', sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column('organisation', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('name', sa.TEXT(), autoincrement=False, nullable=False),
+ sa.Column('email', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.ForeignKeyConstraint(['organisation'], ['__community_organisations.name'], name='__mentors_organisation_fkey'),
+ sa.PrimaryKeyConstraint('id', name='mentors_pkey')
+ )
+ op.create_table('__contributors_vc',
+ sa.Column('github_username', sa.TEXT(), autoincrement=False, nullable=False),
+ sa.Column('discord_id', sa.BIGINT(), autoincrement=False, nullable=False),
+ sa.Column('certificate_link', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.Column('stats', sa.TEXT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('github_username', name='contributors_vc_pkey')
+ )
+ op.drop_table('user_points_mapping')
+ op.drop_table('unstructured_discord_data')
+ op.drop_table('role_master')
+ op.drop_table('leaderboard')
+ op.drop_table('github_profile_data')
+ # ### end Alembic commands ###
diff --git a/shared_migrations/migrations/versions/__pycache__/8d1e6a7e959a_initial_migration.cpython-310.pyc b/shared_migrations/migrations/versions/__pycache__/8d1e6a7e959a_initial_migration.cpython-310.pyc
new file mode 100644
index 0000000..eb3b18f
Binary files /dev/null and b/shared_migrations/migrations/versions/__pycache__/8d1e6a7e959a_initial_migration.cpython-310.pyc differ
diff --git a/shared_migrations/requirements.txt b/shared_migrations/requirements.txt
new file mode 100644
index 0000000..7b9da29
Binary files /dev/null and b/shared_migrations/requirements.txt differ
diff --git a/shared_migrations/sample.env b/shared_migrations/sample.env
new file mode 100644
index 0000000..0e971de
--- /dev/null
+++ b/shared_migrations/sample.env
@@ -0,0 +1,6 @@
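+# Copy to .env and fill in; e.g. DATABASE_URL="postgresql://user:pass@host:5432/dbname" (illustrative values).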
+POSTGRES_DB_HOST=""
+POSTGRES_DB_NAME=""
+POSTGRES_DB_USER="
+POSTGRES_DB_PASS=""
+DATABASE_URL=""
\ No newline at end of file