add dataclasses #106

Merged
merged 7 commits on Mar 29, 2025
21 changes: 6 additions & 15 deletions ForgejoRepoAPI.py
@@ -1,21 +1,12 @@
from interface_wrapper import (
logging,
IRepositoryAPI,
Repository,
Commit,
Branch,
User,
Contributor,
Issue,
PullRequest,
WikiPage,
Comment,
Invite,
)
import base64
import sys
from pyforgejo import PyforgejoApi

import isodate
from pyforgejo import PyforgejoApi

from interface_wrapper import (Branch, Comment, Commit, Contributor, Invite,
IRepositoryAPI, Issue, PullRequest, Repository,
User, WikiPage, logging)


class ForgejoRepoAPI(IRepositoryAPI):
17 changes: 3 additions & 14 deletions GitHubRepoAPI.py
@@ -1,17 +1,6 @@
from interface_wrapper import (
logging,
Repository,
Contributor,
Commit,
Issue,
PullRequest,
WikiPage,
Branch,
IRepositoryAPI,
User,
Comment,
Invite,
)
from interface_wrapper import (Branch, Comment, Commit, Contributor, Invite,
IRepositoryAPI, Issue, PullRequest, Repository,
User, WikiPage, logging)


class GitHubRepoAPI(IRepositoryAPI):
58 changes: 29 additions & 29 deletions commits_parser.py
@@ -1,24 +1,22 @@
from utils import logger
import pytz
from dataclasses import asdict, dataclass
from time import sleep

# from github import Github, Repository, GithubException, PullRequest
import pytz

from constants import EMPTY_FIELD, GOOGLE_MAX_CELL_LEN, TIMEDELTA, TIMEZONE
from interface_wrapper import IRepositoryAPI, Repository
from utils import logger


EMPTY_FIELD = 'Empty field'
TIMEDELTA = 0.05
TIMEZONE = 'Europe/Moscow'
FIELDNAMES = (
'repository name',
'author name',
'author login',
'author email',
'date and time',
'changed files',
'commit id',
'branch',
)
GOOGLE_MAX_CELL_LEN = 50000
@dataclass(kw_only=True, frozen=True)
class CommitData:
repository_name: str = ''
author_name: str = ''
author_email: str = ''
datetime: str = ''
changed_files: str = ''
commit_id: str = ''
branch: str = ''


def log_repository_commits(
@@ -45,18 +43,19 @@ def log_repository_commits(
continue

changed_files = '; '.join([file for file in commit.files])
commit_data = [
repository.name,
commit.author.username,
commit.author.email or EMPTY_FIELD,
commit.date,
changed_files[:GOOGLE_MAX_CELL_LEN],
commit._id,
branch,
]
info = dict(zip(FIELDNAMES, commit_data))
changed_files = changed_files[:GOOGLE_MAX_CELL_LEN]
commit_data = CommitData(
repository_name=repository.name,
author_name=commit.author.username,
author_email=commit.author.email or EMPTY_FIELD,
datetime=commit.date.astimezone(pytz.timezone(TIMEZONE)).isoformat(),
changed_files=changed_files,
commit_id=commit._id,
branch=branch,
)
info = asdict(commit_data)

logger.log_to_csv(csv_name, FIELDNAMES, info)
logger.log_to_csv(csv_name, list(info.keys()), info)
logger.log_to_stdout(info)

sleep(TIMEDELTA)
@@ -65,7 +64,8 @@ def log_commits(
def log_commits(
client: IRepositoryAPI, working_repos, csv_name, start, finish, branch, fork_flag
):
logger.log_to_csv(csv_name, FIELDNAMES)
info = asdict(CommitData())
logger.log_to_csv(csv_name, list(info.keys()))

for repo, token in working_repos:
try:
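The new header-row call in log_commits works because a default-constructed CommitData carries every field with its empty default, so asdict() yields the column names in declaration order. A minimal standalone sketch of the pattern used in this file (the logger calls are omitted; the sample values are placeholders):

from dataclasses import asdict, dataclass

@dataclass(kw_only=True, frozen=True)
class CommitData:
    repository_name: str = ''
    author_name: str = ''
    author_email: str = ''
    datetime: str = ''
    changed_files: str = ''
    commit_id: str = ''
    branch: str = ''

# Header row: every field of a default instance, in declaration order.
header = list(asdict(CommitData()).keys())
# -> ['repository_name', 'author_name', 'author_email', 'datetime',
#     'changed_files', 'commit_id', 'branch']

# Data row: a frozen record serialized back to a plain dict for CSV/stdout logging.
row = asdict(CommitData(repository_name='example-repo', branch='main'))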
18 changes: 18 additions & 0 deletions constants.py
@@ -0,0 +1,18 @@
EMPTY_FIELD = 'Empty field'
TIMEDELTA = 0.05
TIMEZONE = 'Europe/Moscow'
GOOGLE_MAX_CELL_LEN = 50000
TITLE_LEN = 80
MIN_SIDE_PADDING = 4
SIDE_WHITE_SPACES = 1
WIKI_FIELDNAMES = [
'repository name',
'author name',
'author login',
'datetime',
'page',
'action',
'revision id',
'added lines',
'deleted lines',
]
73 changes: 38 additions & 35 deletions contributors_parser.py
@@ -1,25 +1,25 @@
from utils import logger
from dataclasses import asdict, dataclass
from time import sleep
from typing import Generator

from constants import EMPTY_FIELD, TIMEDELTA
from interface_wrapper import IRepositoryAPI, Repository
from utils import logger


EMPTY_FIELD = 'Empty field'
TIMEDELTA = 0.05
TIMEZONE = 'Europe/Moscow'
FIELDNAMES = (
'repository name',
'login',
'name',
'email',
'url',
'permissions',
'total_commits',
'id',
'node_id',
'type',
'bio',
'site_admin',
)
@dataclass(kw_only=True, frozen=True)
class ContributorData:
repository_name: str = ''
login: str = ''
name: str = ''
email: str = ''
url: str = ''
permissions: str = ''
total_commits: int = 0
node_id: str = ''
type: str = ''
bio: str = ''
site_admin: bool = False


def log_repository_contributors(
@@ -36,22 +36,24 @@ def nvl(val):
repository, contributor
)

info_tmp = {
'repository name': repository.name,
'login': contributor.login,
'name': nvl(contributor.username),
'email': nvl(contributor_stat['email']),
'url': contributor.html_url,
'permissions': nvl(contributor_permissions),
'total_commits': contributor_stat['total_commits'],
'node_id': contributor.node_id,
'type': contributor.type,
'bio': nvl(contributor.bio),
'site_admin': contributor.site_admin,
}

logger.log_to_csv(csv_name, FIELDNAMES, info_tmp)
logger.log_to_stdout(info_tmp)
contributor_data = ContributorData(
repository_name=repository.name,
login=contributor.login,
name=nvl(contributor.username),
email=nvl(contributor_stat['email']),
url=contributor.html_url,
permissions=nvl(contributor_permissions),
total_commits=contributor_stat['total_commits'],
node_id=contributor.node_id,
type=contributor.type,
bio=nvl(contributor.bio),
site_admin=contributor.site_admin,
)

info_dict = asdict(contributor_data)

logger.log_to_csv(csv_name, list(info_dict.keys()), info_dict)
logger.log_to_stdout(info_dict)

sleep(TIMEDELTA)

@@ -80,7 +82,8 @@ def get_contributors_stats(client: IRepositoryAPI, repository: Repository) -> di
def log_contributors(
client: IRepositoryAPI, working_repos: Generator, csv_name: str, fork_flag: bool
):
logger.log_to_csv(csv_name, FIELDNAMES)
info = asdict(ContributorData())
logger.log_to_csv(csv_name, list(info.keys()))

for repo, token in working_repos:
try:
2 changes: 1 addition & 1 deletion export_sheets.py
@@ -1,5 +1,5 @@
import pygsheets
import pandas as pd
import pygsheets

INT_MASS = [{"one": 1, "two": 2, "what?": 3}]

6 changes: 2 additions & 4 deletions git_logger.py
@@ -1,8 +1,7 @@
from interface_wrapper import RepositoryFactory, IRepositoryAPI
from time import sleep

TIMEDELTA = 0.05
TIMEZONE = 'Europe/Moscow'
from constants import TIMEDELTA
from interface_wrapper import IRepositoryAPI, RepositoryFactory


def login(source, token, base_url):
@@ -69,7 +68,6 @@ def get_next_repo(clients: GitClients, repositories):
print(f'get_next_repo(): failed to load repository "{repo_name}"')
exit(1)
else:
print(cur_client['token'])
yield repo, cur_client['token']


6 changes: 3 additions & 3 deletions interface_wrapper.py
@@ -1,7 +1,7 @@
import logging
from abc import ABC, abstractmethod
from datetime import datetime
from dataclasses import dataclass
import logging
from datetime import datetime

from github import Github
from pyforgejo import PyforgejoApi
@@ -178,8 +178,8 @@ class RepositoryFactory:
def create_api(
source: str, token: str, base_url: str | None = None
) -> IRepositoryAPI:
from GitHubRepoAPI import GitHubRepoAPI
from ForgejoRepoAPI import ForgejoRepoAPI
from GitHubRepoAPI import GitHubRepoAPI

if source == 'github':
return GitHubRepoAPI(Github(token))
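The GitHubRepoAPI and ForgejoRepoAPI imports stay local to create_api, presumably because both modules themselves import from interface_wrapper (a circular-import guard); this hunk only reorders them alphabetically. A hypothetical call site, based only on the signature shown above (the token value is a placeholder, and the Forgejo branch with its base_url handling is not visible in this hunk, so that line is an assumption):

from interface_wrapper import RepositoryFactory

# 'github' routes to GitHubRepoAPI wrapping a PyGithub client; the token value is a placeholder.
api = RepositoryFactory.create_api('github', 'ghp_example_token')

# Assumed, not shown in this hunk: a Forgejo instance would likely also need its base_url.
# api = RepositoryFactory.create_api('forgejo', 'example_token', base_url='https://forgejo.example.org')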
52 changes: 23 additions & 29 deletions invites_parser.py
@@ -1,45 +1,39 @@
from utils import logger
from dataclasses import asdict, dataclass
from time import sleep
from interface_wrapper import IRepositoryAPI, Repository

FIELDNAMES = (
'repository name',
'invited login',
'invite creation date',
'invitation url',
)
TIMEDELTA = 0.05
from constants import TIMEDELTA
from interface_wrapper import IRepositoryAPI, Repository
from utils import logger


def log_inviter(repo, invite, writer):
invite_info = [
repo.full_name,
invite.invitee.login,
invite.created_at.strftime("%d/%m/%Y, %H:%M:%S"),
invite.html_url,
]
writer.writerow(invite_info)
print(invite_info)
@dataclass(kw_only=True, frozen=True)
class InviteData:
repository_name: str = ''
invited_login: str = ''
invite_creation_date: str = ''
invitation_url: str = ''


def log_repository_invitations(
client: IRepositoryAPI, repository: Repository, csv_name
client: IRepositoryAPI, repository: Repository, csv_name: str
):
invitations = client.get_invites(repository)
for invite in invitations:
invite_info = {
'repository name': repository.name,
'invited login': invite.invitee.login,
'invite creation date': invite.created_at.strftime("%d/%m/%Y, %H:%M:%S"),
'invitation url': invite.html_url,
}
logger.log_to_csv(csv_name, FIELDNAMES, invite_info)
logger.log_to_stdout(invite_info)
invite_data = InviteData(
repository_name=repository.name,
invited_login=invite.invitee.login,
invite_creation_date=invite.created_at.strftime("%d/%m/%Y, %H:%M:%S"),
invitation_url=invite.html_url,
)
invite_dict = asdict(invite_data)
logger.log_to_csv(csv_name, list(invite_dict.keys()), invite_dict)
logger.log_to_stdout(invite_dict)
sleep(TIMEDELTA)


def log_invitations(client: IRepositoryAPI, working_repos, csv_name):
logger.log_to_csv(csv_name, FIELDNAMES)
def log_invitations(client: IRepositoryAPI, working_repos, csv_name: str):
info = asdict(InviteData())
logger.log_to_csv(csv_name, list(info.keys()))

for repo, token in working_repos:
logger.log_title(repo.name)