From 2c1869cd9e921540ddca8a81979943ebef1f0205 Mon Sep 17 00:00:00 2001 From: Ben Date: Mon, 26 Jun 2023 01:25:36 +0000 Subject: [PATCH 01/38] get creds from secret --- marking_and_admin/mark_functions.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index c29d421..da52344 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -65,9 +65,13 @@ def build_spreadsheet_service(): if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: + with open("temp_spreadsheet_creds.json", "w", encoding="utf-8") as tsc: + tsc.write(os.getenv("SPREADSHEET_CREDS", "")) flow = InstalledAppFlow.from_client_secrets_file( - "marking_and_admin/credentials.json", scopes + "temp_spreadsheet_creds.json", scopes ) + with open("temp_spreadsheet_creds.json", "w", encoding="utf-8") as tsc: + tsc.write("") try: creds = flow.run_local_server() except OSError as os_e: From 0309870dbdbdfeed86349054afbfc0cedcda3760 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 27 Jun 2023 01:45:36 +0000 Subject: [PATCH 02/38] don't commit creds --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index f518266..4a2c546 100644 --- a/.gitignore +++ b/.gitignore @@ -24,3 +24,4 @@ avatar.jpg dc-env student.json dict_cache.json +temp_spreadsheet_creds.json From 7bab8e053a5f309ed67090195750a62abb11fb59 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 27 Jun 2023 01:46:09 +0000 Subject: [PATCH 03/38] desparation, trying to make this work in codespaces --- marking_and_admin/mark_functions.py | 89 +++++++++++++++++++++++------ marking_and_admin/marking_puller.py | 3 +- 2 files changed, 72 insertions(+), 20 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index da52344..f0ef1e2 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -50,10 +50,8 @@ def Run(self): def build_spreadsheet_service(): # If modifying these scopes, delete the file token.pickle. scopes = ["https://www.googleapis.com/auth/spreadsheets"] - """Shows basic usage of the Sheets API. - Prints values from a sample spreadsheet. - """ creds = None + # Shows basic usage of the Sheets API. Prints values from a sample spreadsheet. # The file token.pickle stores the user's access and refresh tokens, and is # created automatically when the authorization flow completes for the first # time. 
@@ -68,15 +66,20 @@ def build_spreadsheet_service(): with open("temp_spreadsheet_creds.json", "w", encoding="utf-8") as tsc: tsc.write(os.getenv("SPREADSHEET_CREDS", "")) flow = InstalledAppFlow.from_client_secrets_file( - "temp_spreadsheet_creds.json", scopes + "temp_spreadsheet_creds.json", + scopes, + redirect_uri="https://design-computing.github.io/", ) - with open("temp_spreadsheet_creds.json", "w", encoding="utf-8") as tsc: - tsc.write("") + # with open("temp_spreadsheet_creds.json", "w", encoding="utf-8") as tsc: + # tsc.write("") try: + pass creds = flow.run_local_server() except OSError as os_e: print(os_e) creds = flow.run_console() + except Exception as eeee: + print(eeee) # Save the credentials for the next run with open("token.pickle", "wb") as token: pickle.dump(creds, token) @@ -202,8 +205,10 @@ def get_forks( api = "https://api.github.com/" limit = 100 # TODO: #29 take these secrets out, put them in an env, and reset them - client_id = os.getenv("CLIENT_ID_GITHUB","") # "040e86e3feed633710a0" - secret = os.getenv("SECRET_GITHUB","")#"69588d73388091b5ff8635fd1a788ea79177bf69" + client_id = os.getenv("CLIENT_ID_GITHUB", "") # "040e86e3feed633710a0" + secret = os.getenv( + "SECRET_GITHUB", "" + ) # "69588d73388091b5ff8635fd1a788ea79177bf69" url = ( f"{api}/repos/{org}/{repo}/forks?" f"per_page={limit}&" @@ -246,7 +251,7 @@ def rate_limit_message(r): def update_repos(row: Series) -> str: """Git clone a repo, or if already cloned, git pull.""" url = row["git_url"] - https_url = url.replace("git://","https://") + https_url = url.replace("git://", "https://") owner = row["owner"] path = os.path.normpath(os.path.join(ROOTDIR, owner)) t = datetime.now().strftime("%H:%M:%S") @@ -408,6 +413,16 @@ def get_readmes(row, output="mark", print_labbooks=False): return [mark, all_readme] +def get_readme_text(row) -> str: + """Get the collected text of all the readme files.""" + return get_readmes(row, output="textList", print_labbooks=False) + + +def get_readme_mark(row) -> int: + """Get the number of readmen files that are filled in.""" + return get_readmes(row, output="mark", print_labbooks=False) + + def test_in_clean_environment( row: Series, set_number: int, @@ -499,13 +514,13 @@ def mark_a_specific_person_week( results_dict = json.loads(contents) results_dict["bigerror"] = ":)" log_progress(f" good for w{set_number}\n", logfile_name) - except Exception as e: + except Exception as mystery_exception: results_dict = { - "bigerror": str(e).replace(",", "~"), + "bigerror": str(mystery_exception).replace(",", "~"), "gh_username": row.owner, } # the comma messes with the csv - log_progress(f" bad {e} w{set_number}\n", logfile_name) + log_progress(f" bad {mystery_exception} w{set_number}\n", logfile_name) elapsed_time = time.time() - start_time results_dict["time"] = elapsed_time @@ -630,6 +645,7 @@ def do_the_marking( mark_w4=False, mark_w5=False, mark_exam=False, + test_number_of_students=0, ): global THIS_YEAR THIS_YEAR = this_year @@ -653,6 +669,8 @@ def do_the_marking( students = get_student_data() mark_sheet = pd.DataFrame(students) + if test_number_of_students > 0: + mark_sheet = mark_sheet.head(test_number_of_students) deets = pd.DataFrame(list(mark_sheet.apply(get_details, axis=1))) # temp: @@ -672,20 +690,53 @@ def do_the_marking( ) mark_sheet.drop(["name"], axis=1, errors="ignore", inplace=True) - mark_sheet["readme_mark"] = mark_sheet.apply(get_readmes, args=("mark",), axis=1) - mark_sheet["readme_text"] = mark_sheet.apply( - get_readmes, args=("textList",), axis=1 
- ) + mark_sheet["readme_mark"] = mark_sheet.apply(get_readme_mark, axis=1) + mark_sheet["readme_text"] = mark_sheet.apply(get_readme_text, axis=1) + use_nice_spreadsheet_connection = False + if not use_nice_spreadsheet_connection: + convert_result_dicts_to_ints(mark_sheet) mark_sheet.to_csv(MARKS_CSV) - data = [list(x) for x in mark_sheet.to_numpy()] - service = build_spreadsheet_service() - write(service, data=data) + if use_nice_spreadsheet_connection: + data = [list(x) for x in mark_sheet.to_numpy()] + service = build_spreadsheet_service() + write(service, data=data) print("that took", (time.time() - start_time) / 60, "minutes") +def convert_result_dicts_to_ints(mark_sheet): + """Convert the dict of results into a single mark. + + dict looks like this: + { + 'of_total': 2, + 'mark': 0, + 'results': [ + {'value': 0, 'name': 'Exercise 2: debug the file'}, + {'value': 0, 'name': 'Lab book entry completed'} + ], + 'week_number': 2, + 'localError': ':)', + 'repo_owner': 'rhiannon84', + 'bigerror': ':)', + 'time': 4.783979892730713 + } + so we're just pulling the mark out, but it's fraught, so there's some checking. + """ + + def convert_d_to_i(results_dict) -> int: + try: + return results_dict.get("mark", 0) + except AttributeError as attr_err: + print(attr_err) + return 0 + + for i in range(1, 6): + mark_sheet[f"set{i}"] = mark_sheet[f"set{i}"].apply(convert_d_to_i) + + def get_student_data(): # TODO: instead of loading the pickle, load the marks.csv file so that # the dataframe is preloaded with values. Then it doesn't need to mark students diff --git a/marking_and_admin/marking_puller.py b/marking_and_admin/marking_puller.py index 9b8aa8c..5b63763 100644 --- a/marking_and_admin/marking_puller.py +++ b/marking_and_admin/marking_puller.py @@ -27,8 +27,9 @@ marks_csv="marks.csv", mark_w1=True, mark_w2=True, - mark_w3=False, + mark_w3=True, mark_w4=False, mark_w5=False, mark_exam=False, + test_number_of_students=2, # if more than 0, will only mark the first n ) From abb393495acf5563c7a3d5ac69bedc900be061d7 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 27 Jun 2023 02:39:55 +0000 Subject: [PATCH 04/38] add type hints and start a docstring --- marking_and_admin/mark_functions.py | 41 ++++++++++++++++++++--------- 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index f0ef1e2..1f3ea41 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -633,20 +633,35 @@ def mark_week( def do_the_marking( - this_year="2023", - rootdir="../StudentRepos", - chatty=False, + this_year: str = "2023", + rootdir: str = "../StudentRepos", + chatty: bool = False, force_marking=False, - marking_spreadsheet_id="16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY", # 2022 - marks_csv="marks.csv", - mark_w1=True, - mark_w2=False, - mark_w3=False, - mark_w4=False, - mark_w5=False, - mark_exam=False, - test_number_of_students=0, -): + marking_spreadsheet_id: str = "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY", # 2022 + marks_csv: str = "marks.csv", + mark_w1:bool=True, + mark_w2:bool=False, + :bool=False, + mark_w4:bool=False, + mark_w5:bool=False, + mark_exam:bool=False, + test_number_of_students:int=0, +)->None: + """do_the_marking Runs tests against all student work. + + Args: + this_year (str, optional): The year that you want to test. Defaults to "2023". + rootdir (str, optional): Where you want to keep all the repos you're working with. Defaults to "../StudentRepos". 
+ chatty (bool, optional): Do you want it to be verbose? Defaults to False. + force_marking (bool, optional): _description_. Defaults to False. + marking_spreadsheet_id (str, optional): _description_. Defaults to "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY". + mark_w1 (bool, optional): _description_. Defaults to True. + mark_w2 (bool, optional): _description_. Defaults to False. + mark_w4 (bool, optional): _description_. Defaults to False. + mark_w5 (bool, optional): _description_. Defaults to False. + mark_exam (bool, optional): _description_. Defaults to False. + test_number_of_students (int, optional): _description_. Defaults to 0. + """ global THIS_YEAR THIS_YEAR = this_year global ROOTDIR From 0823761897dd78455a7b8ea106bf9e9c96fe98f3 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 27 Jun 2023 04:58:02 +0000 Subject: [PATCH 05/38] more specific exception type --- marking_and_admin/mark_functions.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index 1f3ea41..16ca84d 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -514,13 +514,13 @@ def mark_a_specific_person_week( results_dict = json.loads(contents) results_dict["bigerror"] = ":)" log_progress(f" good for w{set_number}\n", logfile_name) - except Exception as mystery_exception: + except json.JSONDecodeError as json_exception: results_dict = { - "bigerror": str(mystery_exception).replace(",", "~"), + "bigerror": str(json_exception).replace(",", "~"), "gh_username": row.owner, } # the comma messes with the csv - log_progress(f" bad {mystery_exception} w{set_number}\n", logfile_name) + log_progress(f" bad {json_exception} w{set_number}\n", logfile_name) elapsed_time = time.time() - start_time results_dict["time"] = elapsed_time From 62793b40c35ce82b5605289fe88b23671bfe09e4 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 27 Jun 2023 04:58:22 +0000 Subject: [PATCH 06/38] fix args --- marking_and_admin/mark_functions.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index 16ca84d..bc4f062 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -639,14 +639,14 @@ def do_the_marking( force_marking=False, marking_spreadsheet_id: str = "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY", # 2022 marks_csv: str = "marks.csv", - mark_w1:bool=True, - mark_w2:bool=False, - :bool=False, - mark_w4:bool=False, - mark_w5:bool=False, - mark_exam:bool=False, - test_number_of_students:int=0, -)->None: + mark_w1: bool = True, + mark_w2: bool = False, + mark_w3: bool = False, + mark_w4: bool = False, + mark_w5: bool = False, + mark_exam: bool = False, + test_number_of_students: int = 0, +) -> None: """do_the_marking Runs tests against all student work. Args: @@ -657,6 +657,7 @@ def do_the_marking( marking_spreadsheet_id (str, optional): _description_. Defaults to "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY". mark_w1 (bool, optional): _description_. Defaults to True. mark_w2 (bool, optional): _description_. Defaults to False. + mark_w3 (bool, optional): _description_. Defaults to False. mark_w4 (bool, optional): _description_. Defaults to False. mark_w5 (bool, optional): _description_. Defaults to False. mark_exam (bool, optional): _description_. Defaults to False. 
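A minimal standalone sketch of the failure mode that the narrower "except json.JSONDecodeError" clause introduced in PATCH 05 guards against: temp_results.json can come back empty or truncated, and json.loads then raises JSONDecodeError. This is not part of the patch series; the function and the sample inputs are illustrative only, with names (results_dict, bigerror, gh_username) mirroring mark_a_specific_person_week above.

import json


def parse_results(contents: str, owner: str) -> dict:
    """Parse the test shim's temp_results.json output, tolerating bad JSON."""
    try:
        # An empty or truncated file raises json.JSONDecodeError here.
        results_dict = json.loads(contents)
        results_dict["bigerror"] = ":)"
    except json.JSONDecodeError as json_exception:
        results_dict = {
            "bigerror": str(json_exception).replace(",", "~"),  # the comma messes with the csv
            "gh_username": owner,
        }
    return results_dict


print(parse_results('{"mark": 2, "of_total": 2}', "some-student"))  # happy path
print(parse_results("", "some-student"))  # empty shim output -> error branch

Run on its own, the second call shows why the specific exception type is enough here: bad or missing JSON is the expected failure, while anything else should still surface loudly rather than being swallowed.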
From 05bdf6dc575eebe0c91d33ade767a3946fbdc965 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 27 Jun 2023 04:58:36 +0000 Subject: [PATCH 07/38] set up for w4 --- marking_and_admin/marking_puller.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/marking_and_admin/marking_puller.py b/marking_and_admin/marking_puller.py index 5b63763..fe555a5 100644 --- a/marking_and_admin/marking_puller.py +++ b/marking_and_admin/marking_puller.py @@ -28,8 +28,8 @@ mark_w1=True, mark_w2=True, mark_w3=True, - mark_w4=False, + mark_w4=True, mark_w5=False, mark_exam=False, - test_number_of_students=2, # if more than 0, will only mark the first n + test_number_of_students=0, # if more than 0, will only mark the first N ) From 4e1ee7fc32fd28eba2bd0bd81f66202060d3abe1 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 27 Jun 2023 04:58:54 +0000 Subject: [PATCH 08/38] auth example file --- marking_and_admin/google_auth_test.py | 76 +++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 marking_and_admin/google_auth_test.py diff --git a/marking_and_admin/google_auth_test.py b/marking_and_admin/google_auth_test.py new file mode 100644 index 0000000..fb93c92 --- /dev/null +++ b/marking_and_admin/google_auth_test.py @@ -0,0 +1,76 @@ +# import os + +# from google_auth_oauthlib.flow import Flow + +# with open("temp_spreadsheet_creds.json", "w", encoding="utf-8") as tsc: +# tsc.write(os.getenv("SPREADSHEET_CREDS", "")) +# # Create the flow using the client secrets file from the Google API +# # Console. +# flow = Flow.from_client_secrets_file( +# "temp_spreadsheet_creds.json", +# scopes=["https://www.googleapis.com/auth/spreadsheets"], +# redirect_uri="https://design-computing.github.io/", +# ) + +# # Tell the user to go to the authorization URL. +# auth_url, _ = flow.authorization_url(https://github.com/prompt=%22consent") + +# print(f"Please go to this URL:\n\n{auth_url}\n") + +# # The user will get an authorization code. This code is used to get the +# # access token. +# code = input("Enter the authorization code: ") +# flow.fetch_token(code=code) + +# # You can use flow.credentials, or you can just get a requests session +# # using flow.authorized_session. +# session = flow.authorized_session() +# print(session.get("https://www.googleapis.com/userinfo/v2/me").json()) + + +import os +import pprint + +import google.oauth2.credentials +from google_auth_oauthlib.flow import InstalledAppFlow +from googleapiclient.discovery import build +from googleapiclient.errors import HttpError + +pp = pprint.PrettyPrinter(indent=2) + +# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains +# the OAuth 2.0 information for this application, including its client_id and +# client_secret. +with open("temp_spreadsheet_creds.json", "w", encoding="utf-8") as tsc: + tsc.write(os.getenv("SPREADSHEET_CREDS", "")) +CLIENT_SECRETS_FILE = "temp_spreadsheet_creds.json" + +# This access scope grants read-only access to the authenticated user's Drive +# account. 
+SCOPES = ["https://www.googleapis.com/auth/spreadsheets"] +API_SERVICE_NAME = "spreadsheets" +API_VERSION = "v4" + + +def get_authenticated_service(): + flow = InstalledAppFlow.from_client_secrets_file( + CLIENT_SECRETS_FILE, + SCOPES, + redirect_uri="https://design-computing.github.io/", + ) + credentials = flow.run_console() + return build(API_SERVICE_NAME, API_VERSION, credentials=credentials) + + +def list_drive_files(service, **kwargs): + results = service.files().list(**kwargs).execute() + + pp.pprint(results) + + +if __name__ == "__main__": + # When running locally, disable OAuthlib's HTTPs verification. When + # running in production *do not* leave this option enabled. + os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1" + service = get_authenticated_service() + list_drive_files(service, orderBy="modifiedByMeTime desc", pageSize=5) From e9ba5c2692cca9da7f65be9043ae25f163c58874 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 27 Jun 2023 04:59:34 +0000 Subject: [PATCH 09/38] requirements for google auth seems like it needs specific versions --- requirements.in | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/requirements.in b/requirements.in index 906a164..ee6e5a4 100644 --- a/requirements.in +++ b/requirements.in @@ -3,11 +3,10 @@ colorama func_timeout GitPython google -google_auth_oauthlib -google-api-python-client -google-auth -google-auth-httplib2 -google-auth-oauthlib +google-api-python-client==1.7.2 +google-auth==1.8.0 +google-auth-httplib2==0.0.3 +google-auth-oauthlib==0.4.1 matplotlib mock mypy @@ -19,3 +18,5 @@ requests ruamel.yaml types-requests wheel + +# pip install --force-reinstall -v "google-api-python-client==1.7.2" "google-auth==1.8.0" "google-auth-httplib2==0.0.3" "google-auth-oauthlib==0.4.1" From fdb0611e78c2c826f3b8793d64224c3c1ba773e1 Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 28 Jun 2023 07:03:00 +0000 Subject: [PATCH 10/38] lint, add a doc string --- marking_and_admin/all week's tests.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/marking_and_admin/all week's tests.py b/marking_and_admin/all week's tests.py index e8d40e2..20747d1 100644 --- a/marking_and_admin/all week's tests.py +++ b/marking_and_admin/all week's tests.py @@ -1,3 +1,5 @@ +"""Run all the weeks of one person's tests. 
+""" import os for i in range(1, 10): From d9a3fccc6c6d2f5c84a0474e5a3b5e45a9a2745a Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 28 Jun 2023 07:03:10 +0000 Subject: [PATCH 11/38] rename, lint --- marking_and_admin/{gitTest.py => git_test.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename marking_and_admin/{gitTest.py => git_test.py} (100%) diff --git a/marking_and_admin/gitTest.py b/marking_and_admin/git_test.py similarity index 100% rename from marking_and_admin/gitTest.py rename to marking_and_admin/git_test.py From 509d11ad92a523b41ffe0ecc96cd8d71e444b064 Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 28 Jun 2023 07:03:49 +0000 Subject: [PATCH 12/38] rename, lint --- marking_and_admin/{labInspector.py => lab_inspector.py} | 0 marking_and_admin/{lambdaTester.py => lambda_tester.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename marking_and_admin/{labInspector.py => lab_inspector.py} (100%) rename marking_and_admin/{lambdaTester.py => lambda_tester.py} (100%) diff --git a/marking_and_admin/labInspector.py b/marking_and_admin/lab_inspector.py similarity index 100% rename from marking_and_admin/labInspector.py rename to marking_and_admin/lab_inspector.py diff --git a/marking_and_admin/lambdaTester.py b/marking_and_admin/lambda_tester.py similarity index 100% rename from marking_and_admin/lambdaTester.py rename to marking_and_admin/lambda_tester.py From 12fa6d5413943b3e01cdc4dedb9d3508b0e03589 Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 28 Jun 2023 07:04:23 +0000 Subject: [PATCH 13/38] rename_lint --- marking_and_admin/{testFixer.py => test_fixer.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename marking_and_admin/{testFixer.py => test_fixer.py} (100%) diff --git a/marking_and_admin/testFixer.py b/marking_and_admin/test_fixer.py similarity index 100% rename from marking_and_admin/testFixer.py rename to marking_and_admin/test_fixer.py From 4784cd083efe90c40919c1725c5fa1be54bd352c Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 28 Jun 2023 07:11:10 +0000 Subject: [PATCH 14/38] make the linter happy --- marking_and_admin/test_shim.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/marking_and_admin/test_shim.py b/marking_and_admin/test_shim.py index 79c5817..5fc4be3 100644 --- a/marking_and_admin/test_shim.py +++ b/marking_and_admin/test_shim.py @@ -25,14 +25,15 @@ def do_the_test(repo_path): test = importlib.util.module_from_spec(spec) spec.loader.exec_module(test) print("about to test", repo_path) - r = test.theTests(repo_path) - r["localError"] = ":)" - return r - except Exception as e: + results = test.theTests(repo_path) + results["localError"] = ":)" + return results + except Exception as mystery_error: return { "of_total": 0, "mark": 0, - "localError": str(e).replace(",", "~"), # the comma messes with the csv + "localError": str(mystery_error).replace(",", "~"), + # the comma messes with the csv } @@ -70,9 +71,9 @@ def results_as_json(repo_path): """ ) -with open("temp_results.json", "w") as temp_results: - results = results_as_json(REPO_PATH) - temp_results.write(results) +with open("temp_results.json", "w", encoding="utf-8") as temp_results: + test_results = results_as_json(REPO_PATH) + temp_results.write(test_results) sleep(0.50) sleep(0.50) From 6b73c7feb36d61e6f8975242119f2373d181d8f2 Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 28 Jun 2023 07:11:18 +0000 Subject: [PATCH 15/38] old and obsolete --- marking_and_admin/jest.config.js | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 
marking_and_admin/jest.config.js diff --git a/marking_and_admin/jest.config.js b/marking_and_admin/jest.config.js deleted file mode 100644 index e69de29..0000000 From f5928188d16fb4bdd9e732e0f028ffffd6fa08f7 Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 28 Jun 2023 07:13:34 +0000 Subject: [PATCH 16/38] move these here to tidy up let's see if anyone misses them? --- marking_and_admin/{ => old_code}/all week's tests.py | 0 marking_and_admin/{ => old_code}/git_test.py | 0 marking_and_admin/{ => old_code}/google_auth_test.py | 0 marking_and_admin/{ => old_code}/how_did_x_do_y.py | 0 marking_and_admin/{ => old_code}/lab_inspector.py | 0 marking_and_admin/{ => old_code}/lambda_tester.py | 0 marking_and_admin/{ => old_code}/old_css.css | 0 marking_and_admin/{ => old_code}/quickstart.py | 0 marking_and_admin/{ => old_code}/test_fixer.py | 0 marking_and_admin/{ => old_code}/tester.py | 0 10 files changed, 0 insertions(+), 0 deletions(-) rename marking_and_admin/{ => old_code}/all week's tests.py (100%) rename marking_and_admin/{ => old_code}/git_test.py (100%) rename marking_and_admin/{ => old_code}/google_auth_test.py (100%) rename marking_and_admin/{ => old_code}/how_did_x_do_y.py (100%) rename marking_and_admin/{ => old_code}/lab_inspector.py (100%) rename marking_and_admin/{ => old_code}/lambda_tester.py (100%) rename marking_and_admin/{ => old_code}/old_css.css (100%) rename marking_and_admin/{ => old_code}/quickstart.py (100%) rename marking_and_admin/{ => old_code}/test_fixer.py (100%) rename marking_and_admin/{ => old_code}/tester.py (100%) diff --git a/marking_and_admin/all week's tests.py b/marking_and_admin/old_code/all week's tests.py similarity index 100% rename from marking_and_admin/all week's tests.py rename to marking_and_admin/old_code/all week's tests.py diff --git a/marking_and_admin/git_test.py b/marking_and_admin/old_code/git_test.py similarity index 100% rename from marking_and_admin/git_test.py rename to marking_and_admin/old_code/git_test.py diff --git a/marking_and_admin/google_auth_test.py b/marking_and_admin/old_code/google_auth_test.py similarity index 100% rename from marking_and_admin/google_auth_test.py rename to marking_and_admin/old_code/google_auth_test.py diff --git a/marking_and_admin/how_did_x_do_y.py b/marking_and_admin/old_code/how_did_x_do_y.py similarity index 100% rename from marking_and_admin/how_did_x_do_y.py rename to marking_and_admin/old_code/how_did_x_do_y.py diff --git a/marking_and_admin/lab_inspector.py b/marking_and_admin/old_code/lab_inspector.py similarity index 100% rename from marking_and_admin/lab_inspector.py rename to marking_and_admin/old_code/lab_inspector.py diff --git a/marking_and_admin/lambda_tester.py b/marking_and_admin/old_code/lambda_tester.py similarity index 100% rename from marking_and_admin/lambda_tester.py rename to marking_and_admin/old_code/lambda_tester.py diff --git a/marking_and_admin/old_css.css b/marking_and_admin/old_code/old_css.css similarity index 100% rename from marking_and_admin/old_css.css rename to marking_and_admin/old_code/old_css.css diff --git a/marking_and_admin/quickstart.py b/marking_and_admin/old_code/quickstart.py similarity index 100% rename from marking_and_admin/quickstart.py rename to marking_and_admin/old_code/quickstart.py diff --git a/marking_and_admin/test_fixer.py b/marking_and_admin/old_code/test_fixer.py similarity index 100% rename from marking_and_admin/test_fixer.py rename to marking_and_admin/old_code/test_fixer.py diff --git a/marking_and_admin/tester.py 
b/marking_and_admin/old_code/tester.py similarity index 100% rename from marking_and_admin/tester.py rename to marking_and_admin/old_code/tester.py From 17e3e91416b5f8e214537aafae85f27705d260cd Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 28 Jun 2023 07:22:52 +0000 Subject: [PATCH 17/38] rename, make it more clearly the entry point --- marking_and_admin/{marking_puller.py => marker.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename marking_and_admin/{marking_puller.py => marker.py} (100%) diff --git a/marking_and_admin/marking_puller.py b/marking_and_admin/marker.py similarity index 100% rename from marking_and_admin/marking_puller.py rename to marking_and_admin/marker.py From 36bdaaa40965cb12f0af599a5bc13b2eb49cb7ec Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 28 Jun 2023 07:23:03 +0000 Subject: [PATCH 18/38] move out of the way --- marking_and_admin/{ => old_code}/inspector_shim.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename marking_and_admin/{ => old_code}/inspector_shim.py (100%) diff --git a/marking_and_admin/inspector_shim.py b/marking_and_admin/old_code/inspector_shim.py similarity index 100% rename from marking_and_admin/inspector_shim.py rename to marking_and_admin/old_code/inspector_shim.py From 751a4f289a05466985b8bd78de078673a75763b3 Mon Sep 17 00:00:00 2001 From: Ben Date: Sat, 1 Jul 2023 05:21:24 +0000 Subject: [PATCH 19/38] guard against the env not loading --- marking_and_admin/marker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/marking_and_admin/marker.py b/marking_and_admin/marker.py index fe555a5..e6d6b94 100644 --- a/marking_and_admin/marker.py +++ b/marking_and_admin/marker.py @@ -17,7 +17,7 @@ MARKING_SPREADSHEET_ID = os.getenv("GOOGLE_SHEETS_KEY", "") -if __name__ == "__main__": +if __name__ == "__main__" and MARKING_SPREADSHEET_ID != "": do_the_marking( this_year="2023", rootdir="../StudentRepos", From a7deeecc3032835551e80816619fdfd5ef72ba8c Mon Sep 17 00:00:00 2001 From: Ben Date: Sat, 1 Jul 2023 05:22:05 +0000 Subject: [PATCH 20/38] rename the file --- marking_and_admin/old_code/tester.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/marking_and_admin/old_code/tester.py b/marking_and_admin/old_code/tester.py index cf093d0..9f87fd3 100644 --- a/marking_and_admin/old_code/tester.py +++ b/marking_and_admin/old_code/tester.py @@ -3,7 +3,7 @@ import os import time -from marking_puller import RunCmd +from marker import RunCmd LOCAL = os.path.dirname(os.path.realpath(__file__)) week_number = 1 From c5a4ceed95ce27438bfdce044cbc246a0a269490 Mon Sep 17 00:00:00 2001 From: Ben Date: Sat, 1 Jul 2023 05:23:54 +0000 Subject: [PATCH 21/38] rename the file --- marking_and_admin/old_code/tester.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/marking_and_admin/old_code/tester.py b/marking_and_admin/old_code/tester.py index cf093d0..215c35b 100644 --- a/marking_and_admin/old_code/tester.py +++ b/marking_and_admin/old_code/tester.py @@ -3,7 +3,7 @@ import os import time -from marking_puller import RunCmd +from mark_functions import RunCmd LOCAL = os.path.dirname(os.path.realpath(__file__)) week_number = 1 From a74cc6fdaf52acaed617b919c3c3307a746de188 Mon Sep 17 00:00:00 2001 From: Ben Date: Sat, 1 Jul 2023 05:28:56 +0000 Subject: [PATCH 22/38] move IDs to readme --- marking_and_admin/Setting up a new year.md | 11 ++++++++++- marking_and_admin/marker.py | 6 ------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/marking_and_admin/Setting up a new year.md 
b/marking_and_admin/Setting up a new year.md index 45843ff..47525df 100644 --- a/marking_and_admin/Setting up a new year.md +++ b/marking_and_admin/Setting up a new year.md @@ -7,6 +7,16 @@ The file `marking_puller_2.py` is the entry point. - in Google drive, make a copy of last year's _Details & marking 2022_ file. - update the `MARKING_SPREADSHEET_ID` const with the ID from that new spreadsheet's URL. +In the past, the ID of the spreadsheet was hardcoded into the file. Now it's in the Codespace's env. + +``` +MARKING_SPREADSHEET_ID = "1wtTAM7A--ka7Lnog43L6jjo9kMCnDElCrTOBllEg4dA" # 2019 +MARKING_SPREADSHEET_ID = "1AjDu51VX26bIcLNMsr2iHq2BtrNEj91krxWKqjDW5aA" # 2020 +MARKING_SPREADSHEET_ID = "17KKMNIseRSo9IVNp-iaUCyEqbAR9tTYAcegzcvVgJFM" # 2021 +MARKING_SPREADSHEET_ID = "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY" # 2022 +MARKING_SPREADSHEET_ID = "1DPBVy9DiVkdFBArOTRtj3L--f62KTnxyFFZrUXrobV0" # 2023 +``` + ## To mark work for the first time - if this is a new computer, run `git config --global url."https://github.com/".insteadOf git@github.com:` or the git library will have a tantrum @@ -22,4 +32,3 @@ The file `marking_puller_2.py` is the entry point. TODO: - congratulate everyone who has a full set of passing tests - diff --git a/marking_and_admin/marker.py b/marking_and_admin/marker.py index e6d6b94..e9ae982 100644 --- a/marking_and_admin/marker.py +++ b/marking_and_admin/marker.py @@ -8,12 +8,6 @@ from mark_functions import do_the_marking -# The ID and range of a sample spreadsheet. -# MARKING_SPREADSHEET_ID = "1wtTAM7A--ka7Lnog43L6jjo9kMCnDElCrTOBllEg4dA" # 2019 -# MARKING_SPREADSHEET_ID = "1AjDu51VX26bIcLNMsr2iHq2BtrNEj91krxWKqjDW5aA" # 2020 -# MARKING_SPREADSHEET_ID = "17KKMNIseRSo9IVNp-iaUCyEqbAR9tTYAcegzcvVgJFM" # 2021 -# MARKING_SPREADSHEET_ID = "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY" # 2022 -# MARKING_SPREADSHEET_ID = "1DPBVy9DiVkdFBArOTRtj3L--f62KTnxyFFZrUXrobV0" # 2023 MARKING_SPREADSHEET_ID = os.getenv("GOOGLE_SHEETS_KEY", "") From ccb3defd214c1a6e4b7ea4c6e821f0b89ac994ba Mon Sep 17 00:00:00 2001 From: Ben Date: Sat, 1 Jul 2023 05:58:17 +0000 Subject: [PATCH 23/38] centralising --- marking_and_admin/marker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/marking_and_admin/marker.py b/marking_and_admin/marker.py index e9ae982..73a44a1 100644 --- a/marking_and_admin/marker.py +++ b/marking_and_admin/marker.py @@ -18,7 +18,7 @@ chatty=False, force_marking=True, marking_spreadsheet_id=MARKING_SPREADSHEET_ID, - marks_csv="marks.csv", + marks_csv="marking_and_admin/marks.csv", mark_w1=True, mark_w2=True, mark_w3=True, From 5e0e284510a6391f2f2056190a581f2a3b4225b6 Mon Sep 17 00:00:00 2001 From: Ben Date: Thu, 6 Jul 2023 04:52:25 +0000 Subject: [PATCH 24/38] Start to refactor force inclusion --- marking_and_admin/mark_functions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index bc4f062..8cf5bfc 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -763,7 +763,8 @@ def get_student_data(): with open(file_name, "r") as data_file: students = json.load(data_file) else: - students = get_forks(force_inclusion_of_these_repos=[]) + force_repos =["lvl-lim"] # TODO: move this into an arg in marker.py + students = get_forks(force_inclusion_of_these_repos=force_repos) with open("student.json", "w") as data_file: json.dump(students, data_file, indent=2) return students From 68f6ed50fba45a47cec824500a42987aea37da59 Mon Sep 
17 00:00:00 2001 From: Ben Date: Tue, 18 Jul 2023 05:36:45 +0000 Subject: [PATCH 25/38] sample the df not head --- marking_and_admin/mark_functions.py | 2 +- marking_and_admin/marker.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index 8cf5bfc..8c21ace 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -686,7 +686,7 @@ def do_the_marking( mark_sheet = pd.DataFrame(students) if test_number_of_students > 0: - mark_sheet = mark_sheet.head(test_number_of_students) + mark_sheet = mark_sheet.sample(test_number_of_students) deets = pd.DataFrame(list(mark_sheet.apply(get_details, axis=1))) # temp: diff --git a/marking_and_admin/marker.py b/marking_and_admin/marker.py index 73a44a1..b0e4f09 100644 --- a/marking_and_admin/marker.py +++ b/marking_and_admin/marker.py @@ -25,5 +25,5 @@ mark_w4=True, mark_w5=False, mark_exam=False, - test_number_of_students=0, # if more than 0, will only mark the first N + test_number_of_students=0, # if more than 0, will only mark a sample of N repos ) From 5266cb70fc977b183e13beb4dcde50c53c84684f Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 18 Jul 2023 05:40:19 +0000 Subject: [PATCH 26/38] make function name clearer --- marking_and_admin/mark_functions.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index 8c21ace..c3e20dd 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -742,7 +742,7 @@ def convert_result_dicts_to_ints(mark_sheet): so we're just pulling the mark out, but it's fraught, so there's some checking. """ - def convert_d_to_i(results_dict) -> int: + def convert_one_results_dict_to_an_int(results_dict) -> int: try: return results_dict.get("mark", 0) except AttributeError as attr_err: @@ -750,7 +750,9 @@ def convert_d_to_i(results_dict) -> int: return 0 for i in range(1, 6): - mark_sheet[f"set{i}"] = mark_sheet[f"set{i}"].apply(convert_d_to_i) + mark_sheet[f"set{i}"] = mark_sheet[f"set{i}"].apply( + convert_one_results_dict_to_an_int + ) def get_student_data(): From bf5ae0e735c4bd20d753303b575b6c28da4797cd Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 18 Jul 2023 05:40:47 +0000 Subject: [PATCH 27/38] add a backup source --- marking_and_admin/mark_functions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index c3e20dd..03ade08 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -750,6 +750,7 @@ def convert_one_results_dict_to_an_int(results_dict) -> int: return 0 for i in range(1, 6): + mark_sheet[f"set{i}_data"] = mark_sheet[f"set{i}"] mark_sheet[f"set{i}"] = mark_sheet[f"set{i}"].apply( convert_one_results_dict_to_an_int ) From 3ec634efa0d1b0884b9ffbc1c36b83e403912b06 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 18 Jul 2023 05:42:23 +0000 Subject: [PATCH 28/38] add the helpful file to the path not sure if it's actually doing anything --- marking_and_admin/marker.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/marking_and_admin/marker.py b/marking_and_admin/marker.py index b0e4f09..9d786d7 100644 --- a/marking_and_admin/marker.py +++ b/marking_and_admin/marker.py @@ -5,6 +5,7 @@ It can clone new repos if you delete the students pickle """ import os +import sys from mark_functions import do_the_marking @@ -12,6 +13,8 @@ if __name__ == "__main__" and 
MARKING_SPREADSHEET_ID != "": + sys.path.insert(0, "/workspaces/me/set2") + do_the_marking( this_year="2023", rootdir="../StudentRepos", From 621e921597162550a4a4e2778028771adf612442 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 18 Jul 2023 05:45:26 +0000 Subject: [PATCH 29/38] pass forced repos in from head function --- marking_and_admin/mark_functions.py | 5 ++++- marking_and_admin/marker.py | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index 03ade08..4fe85a9 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -646,6 +646,7 @@ def do_the_marking( mark_w5: bool = False, mark_exam: bool = False, test_number_of_students: int = 0, + force_repos: list[str] = [], ) -> None: """do_the_marking Runs tests against all student work. @@ -675,6 +676,8 @@ def do_the_marking( MARKING_SPREADSHEET_ID = marking_spreadsheet_id global MARKS_CSV MARKS_CSV = marks_csv + global FORCE_REPOS + FORCE_REPOS = force_repos start_time = time.time() @@ -766,7 +769,7 @@ def get_student_data(): with open(file_name, "r") as data_file: students = json.load(data_file) else: - force_repos =["lvl-lim"] # TODO: move this into an arg in marker.py + force_repos = FORCE_REPOS students = get_forks(force_inclusion_of_these_repos=force_repos) with open("student.json", "w") as data_file: json.dump(students, data_file, indent=2) diff --git a/marking_and_admin/marker.py b/marking_and_admin/marker.py index 9d786d7..f225910 100644 --- a/marking_and_admin/marker.py +++ b/marking_and_admin/marker.py @@ -29,4 +29,5 @@ mark_w5=False, mark_exam=False, test_number_of_students=0, # if more than 0, will only mark a sample of N repos + force_repos=["lvl-lim"], ) From fdeb075d13766d2d58023ef3ff14a8271f21409d Mon Sep 17 00:00:00 2001 From: Ben Date: Mon, 24 Jul 2023 00:46:25 +0000 Subject: [PATCH 30/38] comment out debug prints --- marking_and_admin/test_shim.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/marking_and_admin/test_shim.py b/marking_and_admin/test_shim.py index 5fc4be3..209c6e7 100644 --- a/marking_and_admin/test_shim.py +++ b/marking_and_admin/test_shim.py @@ -58,18 +58,18 @@ def results_as_json(repo_path): REPO_PATH = os.path.normpath(sys.argv[2]) OWNER = sys.argv[3] -print("\n In the shim\n◹◸◹◸◹◸◹◸◹◸◹◸\n\nsys.argv:") -for i, a in list(enumerate(sys.argv)): - print(f"{i}: {a}") +# print("\n In the shim\n◹◸◹◸◹◸◹◸◹◸◹◸\n\nsys.argv:") +# for i, a in list(enumerate(sys.argv)): +# print(f"{i}: {a}") -print( - f""" +# print( +# f""" -TEST_PATH: {TEST_PATH} -REPO_PATH: {os.path.normpath(os.path.abspath(REPO_PATH))} -OWNER: {OWNER} -""" -) +# TEST_PATH: {TEST_PATH} +# REPO_PATH: {os.path.normpath(os.path.abspath(REPO_PATH))} +# OWNER: {OWNER} +# """ +# ) with open("temp_results.json", "w", encoding="utf-8") as temp_results: test_results = results_as_json(REPO_PATH) From 76a3a9da26d5298eede01097a606ecc8481cc06c Mon Sep 17 00:00:00 2001 From: Ben Date: Mon, 24 Jul 2023 01:29:54 +0000 Subject: [PATCH 31/38] add exam explanation --- md/week9.md | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 md/week9.md diff --git a/md/week9.md b/md/week9.md new file mode 100644 index 0000000..5380e6a --- /dev/null +++ b/md/week9.md @@ -0,0 +1,26 @@ +# The Exam + +Pretty simple instructions here: + +1. Pull the latest version of the course repo +1. rename `set8/exercise1.py` to `set8/exercise1337.py` +1. Run the stests against set 8 +1. 
Solve the puzzles +1. commit +1. Push +1. relax, or help other people, your time is your own! + + +or if you want that in more detail: + + +1. Pull the latest version of the course repo + 1. `cd ../course` + 1. `git pull` + 1. `cd ../me` +1. rename `set8/exercise1.py` to `set8/exercise1337.py` by right clicking on it, and adding the extra numbers in. +1. Run the stests against set 8, `python ../course/set8/tests.py` (putting in the 3 if that's what your environment needs). +1. Solve the puzzles, just like you would in a normal set of exercises. Do a puzzle/function, run the tests(above) check that it works, and move on to the next one +1. commit, commit commit. do it early and often. +1. Push. Don't just push once, push all the time, every half hour or so. +1. Relax, or help other people, your time is your own! From 7f7ebe8486b550ea913f41179212637c3aa545c6 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 25 Jul 2023 04:03:52 +0000 Subject: [PATCH 32/38] add a timeout message to the error --- marking_and_admin/mark_functions.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index 4fe85a9..e8bfffd 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -503,16 +503,27 @@ def mark_a_specific_person_week( test_args = [python, path_to_test_shim, path_to_tests, path_to_repo, row.owner] try: + time_in = datetime.now() RunCmd(test_args, timeout).Run() # this is unessarily complicated - + time_out = datetime.now() + total_time_seconds = (time_out - time_in).total_seconds() # full_path = os.path.join(LOCAL, temp_file_path) with open( temp_file_path, "r", encoding="utf-8", errors="ignore" ) as temp_results: contents = temp_results.read() - # TODO: catch empty string contents, and make the error message better - results_dict = json.loads(contents) - results_dict["bigerror"] = ":)" + practical_timeout = math.floor(timeout * 0.98) + if total_time_seconds > practical_timeout: + print("\n\nAnnoying timeout ⌛⏳⌛⏳", "\n" * 5) + message = ( + "Execution timed out. " + + f"It was given {practical_timeout} seconds to complete." 
+ ) + results_dict = {"bigerror": message, "gh_username": row.owner} + else: + # TODO: catch empty string contents, and make the error message better + results_dict = json.loads(contents) + results_dict["bigerror"] = ":)" log_progress(f" good for w{set_number}\n", logfile_name) except json.JSONDecodeError as json_exception: results_dict = { From aa05ff90144b910a132564720daa853f06f95ff5 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 25 Jul 2023 07:34:22 +0000 Subject: [PATCH 33/38] made some conspmetic changes to keep mypy happy --- marking_and_admin/mark_functions.py | 191 +++++++++++++++------------- 1 file changed, 104 insertions(+), 87 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index e8bfffd..2dc57d9 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -11,7 +11,7 @@ from datetime import datetime from io import StringIO from itertools import repeat -from typing import Any # , Optional, Set, Tuple, TypeVar +from typing import Any, Union # , Optional, Set, Tuple, TypeVar import git import pandas as pd @@ -78,8 +78,8 @@ def build_spreadsheet_service(): except OSError as os_e: print(os_e) creds = flow.run_console() - except Exception as eeee: - print(eeee) + except Exception as mystery_error: + print(mystery_error) # Save the credentials for the next run with open("token.pickle", "wb") as token: pickle.dump(creds, token) @@ -118,9 +118,9 @@ def write(service, data=[["These"], ["are"], ["some"], ["d", "entries"]]): def process_for_writing(data): for i, row in enumerate(data): for j, item in enumerate(row): - if type(item) is dict or type(item) is yaml.comments.CommentedMap: + if isinstance(item, dict) or isinstance(item, yaml.comments.CommentedMap): data[i][j] = item.get("mark", str(dict(item))) - elif type(item) is not str and math.isnan(item): + elif (not isinstance(item, str)) and math.isnan(item): data[i][j] = "" return data @@ -129,7 +129,7 @@ def process_for_notes(data): comments = [] for i, row in enumerate(data): for j, item in enumerate(row): - if type(item) is dict: + if isinstance(item, dict): readable_comment: str = prepare_comment(item) ss_comment_package: dict = set_comment(j, i, readable_comment) comments.append(ss_comment_package) @@ -138,31 +138,30 @@ def process_for_notes(data): def prepare_comment(item: dict) -> str: if "results" not in item.keys(): - fu = "some kind of major fuck up" - return f"⚠ {item.get('bigerror', fu)} ⏱ {round(item.get('time', 0))}" + fk_up = "some kind of major fuck up" + return f"⚠ {item.get('bigerror', fk_up)} ⏱ {round(item.get('time', 0))}" test_results = [] - for r in item["results"]: - icon = "👏" if r["value"] == 1 else "💩" - test_results.append( - f"{icon}: {r['name']}" - ) # TODO: trace this back, and get rid of the "name" key, make it exercise_name, or test_name - tr = "\n".join(test_results) + for res in item["results"]: + icon = "👏" if res["value"] == 1 else "💩" + test_results.append(f"{icon}: {res['name']}") + # TODO: trace this back, and get rid of the "name" key, make it exercise_name, or test_name + test_res = "\n".join(test_results) message = f"""{item['repo_owner']} ⏱ {round(item['time'])} -{tr} +{test_res} {item['mark']}/{item['of_total']}""" return message -def set_comment(x, y, comment, y_offset=1): +def set_comment(x_coord, y_coord, comment, y_offset=1): request: dict[str, Any] = { "repeatCell": { "range": { "sheetId": 1704890600, - "startRowIndex": y + y_offset, - "endRowIndex": y + 1 + y_offset, - "startColumnIndex": x, - 
"endColumnIndex": x + 1, + "startRowIndex": y_coord + y_offset, + "endRowIndex": y_coord + 1 + y_offset, + "startColumnIndex": x_coord, + "endColumnIndex": x_coord + 1, }, "cell": {"note": comment}, "fields": "note", @@ -171,10 +170,10 @@ def set_comment(x, y, comment, y_offset=1): return request -def get_DF_from_CSV_URL(url, column_names=False): +def get_df_from_csv_url(/service/url: str, column_names: Union[list[str], bool] = False): """Get a csv of values from google docs.""" - r = requests.get(url) - data = r.text + res = requests.get(url) + data = res.text if column_names: return pd.read_csv(StringIO(data), header=0, names=column_names) else: @@ -191,9 +190,12 @@ def get_forks( Limits to repos created this year (THIS_YEAR as a const) Args: - org (str, optional): The name of the Github user/organisation to pull the forks from. Defaults to "design-computing". - repo (str, optional): The name of the repo to get the forks of. Defaults to "me". - force_inclusion_of_these_repos (list[str], optional): _description_. Defaults to []. + org (str, optional): The name of the Github user/organisation to pull + the forks from. Defaults to "design-computing". + repo (str, optional): The name of the repo to get the forks of. + Defaults to "me". + force_inclusion_of_these_repos (list[str], optional): _description_. + Defaults to []. Raises: Exception: _description_ @@ -216,32 +218,33 @@ def get_forks( f"client_secret={secret}'" ) print("get forks from:\n", url) - r = requests.get(url) - if r.status_code == 200: - forks = r.json() + response = requests.get(url) + if response.status_code == 200: + forks = response.json() repos = [ {"owner": fork["owner"]["login"], "git_url": fork["git_url"]} for fork in forks # filter for this year's repos if (fork["created_at"][:4] == THIS_YEAR) - # a list of repos to get that aren't this year's, to account for students retaking the course + # a list of repos to get that aren't this year's, + # to account for students retaking the course or (fork["owner"]["login"] in force_inclusion_of_these_repos) ] return repos else: - rate_limit_message(r) + rate_limit_message(response) raise Exception("GitHubFuckYouError") -def rate_limit_message(r): +def rate_limit_message(response): rate_limit = requests.get("https://api.github.com/rate_limit").json().get("rate") reset_time = str( time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(rate_limit["reset"])) ) print( - r.status_code, - r.reason, - json.dumps(r.json(), indent=2), + response.status_code, + response.reason, + json.dumps(response.json(), indent=2), json.dumps(rate_limit, indent=2), "try again at" + reset_time, sep="\n", @@ -254,10 +257,10 @@ def update_repos(row: Series) -> str: https_url = url.replace("git://", "https://") owner = row["owner"] path = os.path.normpath(os.path.join(ROOTDIR, owner)) - t = datetime.now().strftime("%H:%M:%S") + time_now_str = datetime.now().strftime("%H:%M:%S") try: git.Repo.clone_from(https_url, path) - print(f"{t}: new repo for {owner}") + print(f"{time_now_str}: new repo for {owner}") return ":) new" except git.GitCommandError as git_command_error: if "already exists and is not an empty directory" in git_command_error.stderr: @@ -267,7 +270,7 @@ def update_repos(row: Series) -> str: repo = git.cmd.Git(path) try: response = repo.pull() - print(f"{t}: pulled {owner}'s repo: {response}") + print(f"{time_now_str}: pulled {owner}'s repo: {response}") return str(response) except Exception as general_exception: repo.execute(["git", "fetch", "--all"]) @@ -301,15 +304,16 @@ def 
try_to_kill(file_path: str, CHATTY: bool = False): try: os.remove(file_path) print(f"deleted {file_path}") - except Exception as e: + except Exception as mystery_error: if CHATTY: - print(file_path, e) + print(file_path, mystery_error) -def pull_all_repos(dirList, CHATTY: bool = False, hardcore_pull: bool = False): +def pull_all_repos(dir_list, CHATTY: bool = False, hardcore_pull: bool = False): """Pull latest version of all repos.""" - of_total = len(dirList) - for i, student_repo in enumerate(dirList): + # TODO: make sure chatty is actually a global + of_total = len(dir_list) + for i, student_repo in enumerate(dir_list): repo_is_here = os.path.join(ROOTDIR, student_repo) try: repo = git.cmd.Git(repo_is_here) @@ -317,16 +321,16 @@ def pull_all_repos(dirList, CHATTY: bool = False, hardcore_pull: bool = False): repo.execute(["git", "fetch", "--all"]) repo.execute(["git", "reset", "--hard", "origin/main"]) repo.pull() # probably not needed, but belt and braces - t = datetime.now().strftime("%H:%M:%S") - print(f"{t}: {i}/{of_total} pulled {student_repo}'s repo") - except Exception as e: - print(student_repo, e) + time_now_str = datetime.now().strftime("%H:%M:%S") + print(f"{time_now_str}: {i}/{of_total} pulled {student_repo}'s repo") + except Exception as mystery_exception: + print(student_repo, mystery_exception) -def csv_of_details(dirList): +def csv_of_details(dir_list): """Make a CSV of all the students.""" results = [] - for student_repo in dirList: + for student_repo in dir_list: path = os.path.join(ROOTDIR, student_repo, "aboutMe.yml") details = open(path).read() # replaces the @ symbol @@ -344,14 +348,14 @@ def csv_of_details(dirList): if details["studentNumber"] == "z1234567": print(student_repo, "hasn't updated") - except Exception as e: + except Exception as mystery_error: print(details) - results.append({"error": e, "repoName": student_repo}) + results.append({"error": mystery_error, "repoName": student_repo}) print("\n\nResults:") - resultsDF = pd.DataFrame(results) + results_df = pd.DataFrame(results) # print(resultsDF) - resultsDF.to_csv(os.path.join(CWD, "csv/studentDetails.csv")) + results_df.to_csv(os.path.join(CWD, "csv/studentDetails.csv")) fix_up_csv() @@ -366,7 +370,7 @@ def fix_up_csv(path="csv/studentDetails.csv"): line = line.replace("^AT^", "@") line = line.replace(",,", ",-,") lines.append(line) - with open(path, "w") as outfile: + with open(path, "w", encoding="utf-8") as outfile: for line in lines: print(line) outfile.write(line) @@ -374,12 +378,14 @@ def fix_up_csv(path="csv/studentDetails.csv"): def log_progress(message, logfile_name): """Write a message to a logfile.""" - completed_students_list = open(logfile_name, "a") + completed_students_list = open(logfile_name, "a", encoding="utf-8") completed_students_list.write(message) completed_students_list.close() -def get_readmes(row, output="mark", print_labbooks=False): +def get_readmes( + row, output="mark", print_labbooks=False +) -> Union[int, str, list[Union[int, str]]]: """Get the text, or the mark, or both related to log books.""" # intro_set = "TODO: Reflect on what you learned this set and what is still unclear." # intro_week = "TODO: Reflect on what you learned this week and what is still unclear." 
@@ -389,11 +395,11 @@ def get_readmes(row, output="mark", print_labbooks=False): mark = 0 all_readme = "" for i in range(1, 11): - p = os.path.join(path, f"set{i}", "readme.md") - if os.path.isfile(p): + file_path = os.path.join(path, f"set{i}", "readme.md") + if os.path.isfile(file_path): try: - with open(p, "r", encoding="utf-8", errors="ignore") as f: - contents = f.read() + with open(file_path, "r", encoding="utf-8", errors="ignore") as file: + contents = file.read() new = re.sub(regex, subst, contents, 0, re.MULTILINE).strip() # print(i,"|", new, "|", len(new)) if len(new) > 0: @@ -415,12 +421,16 @@ def get_readmes(row, output="mark", print_labbooks=False): def get_readme_text(row) -> str: """Get the collected text of all the readme files.""" - return get_readmes(row, output="textList", print_labbooks=False) + text = get_readmes(row, output="textList", print_labbooks=False) + assert isinstance(text, str) + return text def get_readme_mark(row) -> int: """Get the number of readmen files that are filled in.""" - return get_readmes(row, output="mark", print_labbooks=False) + mark = get_readmes(row, output="mark", print_labbooks=False) + assert isinstance(mark, int) + return mark def test_in_clean_environment( @@ -467,8 +477,8 @@ def get_existing_marks_from_csv(row: Series, set_number: int) -> dict: except KeyError as k: print(f"no marks for set{set_number}", k) return {} - except Exception as e: - print(e) + except Exception as mystery_error: + print(mystery_error) return {} @@ -544,50 +554,50 @@ def get_safe_path(*parts): return abs_path -def prepare_log(logfile_name, firstLine="here we go:\n"): +def prepare_log(logfile_name, first_line="here we go:\n"): """Create or empty the log file.""" - completed_students_list = open(logfile_name, "w") - completed_students_list.write(firstLine) + completed_students_list = open(logfile_name, "w", encoding="utf-8") + completed_students_list.write(first_line) completed_students_list.close() -def mark_work(dirList, set_number, root_dir, dfPlease=True, timeout=5): +def mark_work(dir_list, set_number, root_dir, df_please=True, timeout=5): """Mark the set's exercises.""" logfile_name = "temp_completion_log" prepare_log(logfile_name) - r = len(dirList) # for repeat count + repeat_count = len(dir_list) # for repeat count results = list( map( test_in_clean_environment, # Function name - dirList, # student_repo - repeat(root_dir, r), # root_dir - repeat(set_number, r), # set_number - repeat(logfile_name, r), # logfile_name - repeat(timeout, r), # timeout + dir_list, # student_repo + repeat(root_dir, repeat_count), # root_dir + repeat(set_number, repeat_count), # set_number + repeat(logfile_name, repeat_count), # logfile_name + repeat(timeout, repeat_count), # timeout ) ) - resultsDF = pd.DataFrame(results) + results_df = pd.DataFrame(results) csv_path = f"csv/set{set_number}marks.csv" - resultsDF.to_csv(os.path.join(CWD, csv_path), index=False) + results_df.to_csv(os.path.join(CWD, csv_path), index=False) for _ in [1, 2, 3]: # this is pretty dirty, but it gets tricky when you have # ,,, -> ,-,, because each intance needs to be replaced multiple times # TODO: #makeitnice fix_up_csv(path=csv_path) print("\n+-+-+-+-+-+-+-+\n\n") - if dfPlease: - return resultsDF + if df_please: + return results_df def get_details(row: Series) -> dict: try: - path_to_aboutMe = os.path.abspath( + path_to_about_me = os.path.abspath( os.path.join(ROOTDIR, row.owner, "aboutMe.yml") ) - with open(path_to_aboutMe, "r", encoding="utf-8") as yf: - details_raw_yaml = yf.read() + with 
open(path_to_about_me, "r", encoding="utf-8") as y_file: + details_raw_yaml = y_file.read() details: dict = dict(yaml.load(details_raw_yaml, yaml.RoundTripLoader)) details["error"] = "👍" details["owner"] = row.owner @@ -624,10 +634,14 @@ def mark_week( """Mark a single week for all students. Args: - mark_sheet (Dataframe): A dataframe that describes who's going to get marked - set_number (int, optional): The number of the set that we're marking. Defaults to 1. - timeout (int, optional): number of seconds to try for before we cut this student off. Defaults to 10. - active (bool, optional): Is this week being marked yet?. Defaults to True. + mark_sheet (Dataframe): A dataframe that describes who's + going to get marked. + set_number (int, optional): The number of the set that we're marking. + Defaults to 1. + timeout (int, optional): number of seconds to try for before we cut + this student off. Defaults to 10. + active (bool, optional): Is this week being marked yet?. + Defaults to True. Returns: Series: A series of the marks, or if not active yet, 0 @@ -663,10 +677,12 @@ def do_the_marking( Args: this_year (str, optional): The year that you want to test. Defaults to "2023". - rootdir (str, optional): Where you want to keep all the repos you're working with. Defaults to "../StudentRepos". + rootdir (str, optional): Where you want to keep all the repos you're + working with. Defaults to "../StudentRepos". chatty (bool, optional): Do you want it to be verbose? Defaults to False. force_marking (bool, optional): _description_. Defaults to False. - marking_spreadsheet_id (str, optional): _description_. Defaults to "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY". + marking_spreadsheet_id (str, optional): _description_. Defaults to + "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY". mark_w1 (bool, optional): _description_. Defaults to True. mark_w2 (bool, optional): _description_. Defaults to False. mark_w3 (bool, optional): _description_. Defaults to False. 
@@ -710,6 +726,7 @@ def do_the_marking( mark_sheet["updated"] = mark_sheet.apply(update_repos, axis=1) mark_sheet["last_commit"] = mark_sheet.apply(get_last_commit, axis=1) + # TODO: Pass in timeouts and activity through the args, probably like {timeout=15, active=True} mark_sheet["set1"] = mark_week(mark_sheet, set_number=1, timeout=15, active=mark_w1) mark_sheet["set2"] = mark_week(mark_sheet, set_number=2, timeout=15, active=mark_w2) mark_sheet["set3"] = mark_week(mark_sheet, set_number=3, timeout=30, active=mark_w3) From d4b1bc4e5b9e9399aa45a569868871889d72e84e Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 25 Jul 2023 07:40:06 +0000 Subject: [PATCH 34/38] made some conspmetic changes to keep mypy happy --- marking_and_admin/mark_functions.py | 197 +++++++++++++++------------- 1 file changed, 108 insertions(+), 89 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index e8bfffd..7e3a752 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -1,3 +1,5 @@ +"""All the work to actually mark the students' work.""" + import json import math import os @@ -11,7 +13,7 @@ from datetime import datetime from io import StringIO from itertools import repeat -from typing import Any # , Optional, Set, Tuple, TypeVar +from typing import Any, Union import git import pandas as pd @@ -78,8 +80,8 @@ def build_spreadsheet_service(): except OSError as os_e: print(os_e) creds = flow.run_console() - except Exception as eeee: - print(eeee) + except Exception as mystery_error: + print(mystery_error) # Save the credentials for the next run with open("token.pickle", "wb") as token: pickle.dump(creds, token) @@ -118,9 +120,9 @@ def write(service, data=[["These"], ["are"], ["some"], ["d", "entries"]]): def process_for_writing(data): for i, row in enumerate(data): for j, item in enumerate(row): - if type(item) is dict or type(item) is yaml.comments.CommentedMap: + if isinstance(item, dict) or isinstance(item, yaml.comments.CommentedMap): data[i][j] = item.get("mark", str(dict(item))) - elif type(item) is not str and math.isnan(item): + elif (not isinstance(item, str)) and math.isnan(item): data[i][j] = "" return data @@ -129,7 +131,7 @@ def process_for_notes(data): comments = [] for i, row in enumerate(data): for j, item in enumerate(row): - if type(item) is dict: + if isinstance(item, dict): readable_comment: str = prepare_comment(item) ss_comment_package: dict = set_comment(j, i, readable_comment) comments.append(ss_comment_package) @@ -138,31 +140,30 @@ def process_for_notes(data): def prepare_comment(item: dict) -> str: if "results" not in item.keys(): - fu = "some kind of major fuck up" - return f"⚠ {item.get('bigerror', fu)} ⏱ {round(item.get('time', 0))}" + fk_up = "some kind of major fuck up" + return f"⚠ {item.get('bigerror', fk_up)} ⏱ {round(item.get('time', 0))}" test_results = [] - for r in item["results"]: - icon = "👏" if r["value"] == 1 else "💩" - test_results.append( - f"{icon}: {r['name']}" - ) # TODO: trace this back, and get rid of the "name" key, make it exercise_name, or test_name - tr = "\n".join(test_results) + for res in item["results"]: + icon = "👏" if res["value"] == 1 else "💩" + test_results.append(f"{icon}: {res['name']}") + # TODO: trace this back, and get rid of the "name" key, make it exercise_name, or test_name + test_res = "\n".join(test_results) message = f"""{item['repo_owner']} ⏱ {round(item['time'])} -{tr} +{test_res} {item['mark']}/{item['of_total']}""" return message -def set_comment(x, y, 
comment, y_offset=1): +def set_comment(x_coord, y_coord, comment, y_offset=1): request: dict[str, Any] = { "repeatCell": { "range": { "sheetId": 1704890600, - "startRowIndex": y + y_offset, - "endRowIndex": y + 1 + y_offset, - "startColumnIndex": x, - "endColumnIndex": x + 1, + "startRowIndex": y_coord + y_offset, + "endRowIndex": y_coord + 1 + y_offset, + "startColumnIndex": x_coord, + "endColumnIndex": x_coord + 1, }, "cell": {"note": comment}, "fields": "note", @@ -171,10 +172,10 @@ def set_comment(x, y, comment, y_offset=1): return request -def get_DF_from_CSV_URL(url, column_names=False): +def get_df_from_csv_url(/service/url: str, column_names: Union[list[str], bool] = False): """Get a csv of values from google docs.""" - r = requests.get(url) - data = r.text + res = requests.get(url) + data = res.text if column_names: return pd.read_csv(StringIO(data), header=0, names=column_names) else: @@ -191,9 +192,12 @@ def get_forks( Limits to repos created this year (THIS_YEAR as a const) Args: - org (str, optional): The name of the Github user/organisation to pull the forks from. Defaults to "design-computing". - repo (str, optional): The name of the repo to get the forks of. Defaults to "me". - force_inclusion_of_these_repos (list[str], optional): _description_. Defaults to []. + org (str, optional): The name of the Github user/organisation to pull + the forks from. Defaults to "design-computing". + repo (str, optional): The name of the repo to get the forks of. + Defaults to "me". + force_inclusion_of_these_repos (list[str], optional): _description_. + Defaults to []. Raises: Exception: _description_ @@ -216,32 +220,33 @@ def get_forks( f"client_secret={secret}'" ) print("get forks from:\n", url) - r = requests.get(url) - if r.status_code == 200: - forks = r.json() + response = requests.get(url) + if response.status_code == 200: + forks = response.json() repos = [ {"owner": fork["owner"]["login"], "git_url": fork["git_url"]} for fork in forks # filter for this year's repos if (fork["created_at"][:4] == THIS_YEAR) - # a list of repos to get that aren't this year's, to account for students retaking the course + # a list of repos to get that aren't this year's, + # to account for students retaking the course or (fork["owner"]["login"] in force_inclusion_of_these_repos) ] return repos else: - rate_limit_message(r) + rate_limit_message(response) raise Exception("GitHubFuckYouError") -def rate_limit_message(r): +def rate_limit_message(response): rate_limit = requests.get("https://api.github.com/rate_limit").json().get("rate") reset_time = str( time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(rate_limit["reset"])) ) print( - r.status_code, - r.reason, - json.dumps(r.json(), indent=2), + response.status_code, + response.reason, + json.dumps(response.json(), indent=2), json.dumps(rate_limit, indent=2), "try again at" + reset_time, sep="\n", @@ -254,10 +259,10 @@ def update_repos(row: Series) -> str: https_url = url.replace("git://", "https://") owner = row["owner"] path = os.path.normpath(os.path.join(ROOTDIR, owner)) - t = datetime.now().strftime("%H:%M:%S") + time_now_str = datetime.now().strftime("%H:%M:%S") try: git.Repo.clone_from(https_url, path) - print(f"{t}: new repo for {owner}") + print(f"{time_now_str}: new repo for {owner}") return ":) new" except git.GitCommandError as git_command_error: if "already exists and is not an empty directory" in git_command_error.stderr: @@ -267,7 +272,7 @@ def update_repos(row: Series) -> str: repo = git.cmd.Git(path) try: response = repo.pull() 
- print(f"{t}: pulled {owner}'s repo: {response}") + print(f"{time_now_str}: pulled {owner}'s repo: {response}") return str(response) except Exception as general_exception: repo.execute(["git", "fetch", "--all"]) @@ -301,15 +306,16 @@ def try_to_kill(file_path: str, CHATTY: bool = False): try: os.remove(file_path) print(f"deleted {file_path}") - except Exception as e: + except Exception as mystery_error: if CHATTY: - print(file_path, e) + print(file_path, mystery_error) -def pull_all_repos(dirList, CHATTY: bool = False, hardcore_pull: bool = False): +def pull_all_repos(dir_list, CHATTY: bool = False, hardcore_pull: bool = False): """Pull latest version of all repos.""" - of_total = len(dirList) - for i, student_repo in enumerate(dirList): + # TODO: make sure chatty is actually a global + of_total = len(dir_list) + for i, student_repo in enumerate(dir_list): repo_is_here = os.path.join(ROOTDIR, student_repo) try: repo = git.cmd.Git(repo_is_here) @@ -317,16 +323,16 @@ def pull_all_repos(dirList, CHATTY: bool = False, hardcore_pull: bool = False): repo.execute(["git", "fetch", "--all"]) repo.execute(["git", "reset", "--hard", "origin/main"]) repo.pull() # probably not needed, but belt and braces - t = datetime.now().strftime("%H:%M:%S") - print(f"{t}: {i}/{of_total} pulled {student_repo}'s repo") - except Exception as e: - print(student_repo, e) + time_now_str = datetime.now().strftime("%H:%M:%S") + print(f"{time_now_str}: {i}/{of_total} pulled {student_repo}'s repo") + except Exception as mystery_exception: + print(student_repo, mystery_exception) -def csv_of_details(dirList): +def csv_of_details(dir_list): """Make a CSV of all the students.""" results = [] - for student_repo in dirList: + for student_repo in dir_list: path = os.path.join(ROOTDIR, student_repo, "aboutMe.yml") details = open(path).read() # replaces the @ symbol @@ -344,14 +350,14 @@ def csv_of_details(dirList): if details["studentNumber"] == "z1234567": print(student_repo, "hasn't updated") - except Exception as e: + except Exception as mystery_error: print(details) - results.append({"error": e, "repoName": student_repo}) + results.append({"error": mystery_error, "repoName": student_repo}) print("\n\nResults:") - resultsDF = pd.DataFrame(results) + results_df = pd.DataFrame(results) # print(resultsDF) - resultsDF.to_csv(os.path.join(CWD, "csv/studentDetails.csv")) + results_df.to_csv(os.path.join(CWD, "csv/studentDetails.csv")) fix_up_csv() @@ -366,7 +372,7 @@ def fix_up_csv(path="csv/studentDetails.csv"): line = line.replace("^AT^", "@") line = line.replace(",,", ",-,") lines.append(line) - with open(path, "w") as outfile: + with open(path, "w", encoding="utf-8") as outfile: for line in lines: print(line) outfile.write(line) @@ -374,12 +380,14 @@ def fix_up_csv(path="csv/studentDetails.csv"): def log_progress(message, logfile_name): """Write a message to a logfile.""" - completed_students_list = open(logfile_name, "a") + completed_students_list = open(logfile_name, "a", encoding="utf-8") completed_students_list.write(message) completed_students_list.close() -def get_readmes(row, output="mark", print_labbooks=False): +def get_readmes( + row, output="mark", print_labbooks=False +) -> Union[int, str, list[Union[int, str]]]: """Get the text, or the mark, or both related to log books.""" # intro_set = "TODO: Reflect on what you learned this set and what is still unclear." # intro_week = "TODO: Reflect on what you learned this week and what is still unclear." 
@@ -389,11 +397,11 @@ def get_readmes(row, output="mark", print_labbooks=False): mark = 0 all_readme = "" for i in range(1, 11): - p = os.path.join(path, f"set{i}", "readme.md") - if os.path.isfile(p): + file_path = os.path.join(path, f"set{i}", "readme.md") + if os.path.isfile(file_path): try: - with open(p, "r", encoding="utf-8", errors="ignore") as f: - contents = f.read() + with open(file_path, "r", encoding="utf-8", errors="ignore") as file: + contents = file.read() new = re.sub(regex, subst, contents, 0, re.MULTILINE).strip() # print(i,"|", new, "|", len(new)) if len(new) > 0: @@ -415,12 +423,16 @@ def get_readmes(row, output="mark", print_labbooks=False): def get_readme_text(row) -> str: """Get the collected text of all the readme files.""" - return get_readmes(row, output="textList", print_labbooks=False) + text = get_readmes(row, output="textList", print_labbooks=False) + assert isinstance(text, str) + return text def get_readme_mark(row) -> int: """Get the number of readmen files that are filled in.""" - return get_readmes(row, output="mark", print_labbooks=False) + mark = get_readmes(row, output="mark", print_labbooks=False) + assert isinstance(mark, int) + return mark def test_in_clean_environment( @@ -467,8 +479,8 @@ def get_existing_marks_from_csv(row: Series, set_number: int) -> dict: except KeyError as k: print(f"no marks for set{set_number}", k) return {} - except Exception as e: - print(e) + except Exception as mystery_error: + print(mystery_error) return {} @@ -544,50 +556,50 @@ def get_safe_path(*parts): return abs_path -def prepare_log(logfile_name, firstLine="here we go:\n"): +def prepare_log(logfile_name, first_line="here we go:\n"): """Create or empty the log file.""" - completed_students_list = open(logfile_name, "w") - completed_students_list.write(firstLine) + completed_students_list = open(logfile_name, "w", encoding="utf-8") + completed_students_list.write(first_line) completed_students_list.close() -def mark_work(dirList, set_number, root_dir, dfPlease=True, timeout=5): +def mark_work(dir_list, set_number, root_dir, df_please=True, timeout=5): """Mark the set's exercises.""" logfile_name = "temp_completion_log" prepare_log(logfile_name) - r = len(dirList) # for repeat count + repeat_count = len(dir_list) # for repeat count results = list( map( test_in_clean_environment, # Function name - dirList, # student_repo - repeat(root_dir, r), # root_dir - repeat(set_number, r), # set_number - repeat(logfile_name, r), # logfile_name - repeat(timeout, r), # timeout + dir_list, # student_repo + repeat(root_dir, repeat_count), # root_dir + repeat(set_number, repeat_count), # set_number + repeat(logfile_name, repeat_count), # logfile_name + repeat(timeout, repeat_count), # timeout ) ) - resultsDF = pd.DataFrame(results) + results_df = pd.DataFrame(results) csv_path = f"csv/set{set_number}marks.csv" - resultsDF.to_csv(os.path.join(CWD, csv_path), index=False) + results_df.to_csv(os.path.join(CWD, csv_path), index=False) for _ in [1, 2, 3]: # this is pretty dirty, but it gets tricky when you have # ,,, -> ,-,, because each intance needs to be replaced multiple times # TODO: #makeitnice fix_up_csv(path=csv_path) print("\n+-+-+-+-+-+-+-+\n\n") - if dfPlease: - return resultsDF + if df_please: + return results_df def get_details(row: Series) -> dict: try: - path_to_aboutMe = os.path.abspath( + path_to_about_me = os.path.abspath( os.path.join(ROOTDIR, row.owner, "aboutMe.yml") ) - with open(path_to_aboutMe, "r", encoding="utf-8") as yf: - details_raw_yaml = yf.read() + with 
open(path_to_about_me, "r", encoding="utf-8") as y_file: + details_raw_yaml = y_file.read() details: dict = dict(yaml.load(details_raw_yaml, yaml.RoundTripLoader)) details["error"] = "👍" details["owner"] = row.owner @@ -624,10 +636,14 @@ def mark_week( """Mark a single week for all students. Args: - mark_sheet (Dataframe): A dataframe that describes who's going to get marked - set_number (int, optional): The number of the set that we're marking. Defaults to 1. - timeout (int, optional): number of seconds to try for before we cut this student off. Defaults to 10. - active (bool, optional): Is this week being marked yet?. Defaults to True. + mark_sheet (Dataframe): A dataframe that describes who's + going to get marked. + set_number (int, optional): The number of the set that we're marking. + Defaults to 1. + timeout (int, optional): number of seconds to try for before we cut + this student off. Defaults to 10. + active (bool, optional): Is this week being marked yet?. + Defaults to True. Returns: Series: A series of the marks, or if not active yet, 0 @@ -663,10 +679,12 @@ def do_the_marking( Args: this_year (str, optional): The year that you want to test. Defaults to "2023". - rootdir (str, optional): Where you want to keep all the repos you're working with. Defaults to "../StudentRepos". + rootdir (str, optional): Where you want to keep all the repos you're + working with. Defaults to "../StudentRepos". chatty (bool, optional): Do you want it to be verbose? Defaults to False. force_marking (bool, optional): _description_. Defaults to False. - marking_spreadsheet_id (str, optional): _description_. Defaults to "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY". + marking_spreadsheet_id (str, optional): _description_. Defaults to + "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY". mark_w1 (bool, optional): _description_. Defaults to True. mark_w2 (bool, optional): _description_. Defaults to False. mark_w3 (bool, optional): _description_. Defaults to False. 
@@ -710,6 +728,7 @@ def do_the_marking( mark_sheet["updated"] = mark_sheet.apply(update_repos, axis=1) mark_sheet["last_commit"] = mark_sheet.apply(get_last_commit, axis=1) + # TODO: Pass in timeouts and activity through the args, probably like {timeout=15, active=True} mark_sheet["set1"] = mark_week(mark_sheet, set_number=1, timeout=15, active=mark_w1) mark_sheet["set2"] = mark_week(mark_sheet, set_number=2, timeout=15, active=mark_w2) mark_sheet["set3"] = mark_week(mark_sheet, set_number=3, timeout=30, active=mark_w3) @@ -777,12 +796,12 @@ def get_student_data(): students = None file_name = "student.json" if os.path.exists(file_name): - with open(file_name, "r") as data_file: + with open(file_name, "r", encoding="utf-8") as data_file: students = json.load(data_file) else: force_repos = FORCE_REPOS students = get_forks(force_inclusion_of_these_repos=force_repos) - with open("student.json", "w") as data_file: + with open("student.json", "w", encoding="utf-8") as data_file: json.dump(students, data_file, indent=2) return students From d221d82efd0ac269b1a496c657592f48cf33110d Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 25 Jul 2023 08:05:55 +0000 Subject: [PATCH 35/38] pass args in from top function --- marking_and_admin/mark_functions.py | 44 ++++++++++++++++------------- marking_and_admin/marker.py | 20 ++++++++----- 2 files changed, 38 insertions(+), 26 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index 7e3a752..69c3128 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -301,7 +301,7 @@ def update_repos(row: Series) -> str: return message -def try_to_kill(file_path: str, CHATTY: bool = False): +def try_to_kill(file_path: str): """Attempt to delete the file specified by file_path.""" try: os.remove(file_path) @@ -311,9 +311,9 @@ def try_to_kill(file_path: str, CHATTY: bool = False): print(file_path, mystery_error) -def pull_all_repos(dir_list, CHATTY: bool = False, hardcore_pull: bool = False): +def pull_all_repos(dir_list, hardcore_pull: bool = False): """Pull latest version of all repos.""" - # TODO: make sure chatty is actually a global + of_total = len(dir_list) for i, student_repo in enumerate(dir_list): repo_is_here = os.path.join(ROOTDIR, student_repo) @@ -666,12 +666,12 @@ def do_the_marking( force_marking=False, marking_spreadsheet_id: str = "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY", # 2022 marks_csv: str = "marks.csv", - mark_w1: bool = True, - mark_w2: bool = False, - mark_w3: bool = False, - mark_w4: bool = False, - mark_w5: bool = False, - mark_exam: bool = False, + w1: dict[str, int | bool] = {"timeout": 5, "active": False}, + w2: dict[str, int | bool] = {"timeout": 5, "active": False}, + w3: dict[str, int | bool] = {"timeout": 5, "active": False}, + w4: dict[str, int | bool] = {"timeout": 5, "active": False}, + w5: dict[str, int | bool] = {"timeout": 5, "active": False}, + exam: dict[str, int | bool] = {"timeout": 5, "active": False}, test_number_of_students: int = 0, force_repos: list[str] = [], ) -> None: @@ -728,14 +728,23 @@ def do_the_marking( mark_sheet["updated"] = mark_sheet.apply(update_repos, axis=1) mark_sheet["last_commit"] = mark_sheet.apply(get_last_commit, axis=1) - # TODO: Pass in timeouts and activity through the args, probably like {timeout=15, active=True} - mark_sheet["set1"] = mark_week(mark_sheet, set_number=1, timeout=15, active=mark_w1) - mark_sheet["set2"] = mark_week(mark_sheet, set_number=2, timeout=15, active=mark_w2) - mark_sheet["set3"] = 
mark_week(mark_sheet, set_number=3, timeout=30, active=mark_w3) - mark_sheet["set4"] = mark_week(mark_sheet, set_number=4, timeout=50, active=mark_w4) - mark_sheet["set5"] = mark_week(mark_sheet, set_number=5, timeout=50, active=mark_w5) + mark_sheet["set1"] = mark_week( + mark_sheet, set_number=1, timeout=w1["timeout"], active=w1["active"] + ) + mark_sheet["set2"] = mark_week( + mark_sheet, set_number=2, timeout=w2["timeout"], active=w2["active"] + ) + mark_sheet["set3"] = mark_week( + mark_sheet, set_number=3, timeout=w3["timeout"], active=w3["active"] + ) + mark_sheet["set4"] = mark_week( + mark_sheet, set_number=4, timeout=w4["timeout"], active=w4["active"] + ) + mark_sheet["set5"] = mark_week( + mark_sheet, set_number=5, timeout=w5["timeout"], active=w5["active"] + ) mark_sheet["exam"] = mark_week( - mark_sheet, set_number=8, timeout=45, active=mark_exam + mark_sheet, set_number=8, timeout=exam["timeout"], active=exam["active"] ) mark_sheet.drop(["name"], axis=1, errors="ignore", inplace=True) @@ -790,9 +799,6 @@ def convert_one_results_dict_to_an_int(results_dict) -> int: def get_student_data(): - # TODO: instead of loading the pickle, load the marks.csv file so that - # the dataframe is preloaded with values. Then it doesn't need to mark students - # that haven't updated their work. students = None file_name = "student.json" if os.path.exists(file_name): diff --git a/marking_and_admin/marker.py b/marking_and_admin/marker.py index f225910..2b4c99e 100644 --- a/marking_and_admin/marker.py +++ b/marking_and_admin/marker.py @@ -22,12 +22,18 @@ force_marking=True, marking_spreadsheet_id=MARKING_SPREADSHEET_ID, marks_csv="marking_and_admin/marks.csv", - mark_w1=True, - mark_w2=True, - mark_w3=True, - mark_w4=True, - mark_w5=False, - mark_exam=False, + mark_w1={"timeout":15, "active":True}, + mark_w2={"timeout":15, "active":True}, + mark_w3={"timeout":30, "active":True}, + mark_w4={"timeout":50, "active":True}, + mark_w5={"timeout":50, "active":False}, + mark_exam={"timeout":45, "active":True}, test_number_of_students=0, # if more than 0, will only mark a sample of N repos - force_repos=["lvl-lim"], + force_repos=["lvl-lim", "JeWang"], + ) +elif MARKING_SPREADSHEET_ID == "": + print( + "The MARKING_SPREADSHEET_ID is supposed to come from the env. Either " + "Ben hasn't granted you permissions, or the env is broken in some way." + "It's stored in the codespace's secrets." 
) From 22e1bb031d382ae45978779a30f7950ced9c3d71 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 25 Jul 2023 08:16:15 +0000 Subject: [PATCH 36/38] oops, finish the merge --- marking_and_admin/mark_functions.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index f9dc197..69c3128 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -13,11 +13,7 @@ from datetime import datetime from io import StringIO from itertools import repeat -<<<<<<< HEAD from typing import Any, Union -======= -from typing import Any, Union # , Optional, Set, Tuple, TypeVar ->>>>>>> aa05ff90144b910a132564720daa853f06f95ff5 import git import pandas as pd @@ -315,15 +311,9 @@ def try_to_kill(file_path: str): print(file_path, mystery_error) -<<<<<<< HEAD def pull_all_repos(dir_list, hardcore_pull: bool = False): """Pull latest version of all repos.""" -======= -def pull_all_repos(dir_list, CHATTY: bool = False, hardcore_pull: bool = False): - """Pull latest version of all repos.""" - # TODO: make sure chatty is actually a global ->>>>>>> aa05ff90144b910a132564720daa853f06f95ff5 of_total = len(dir_list) for i, student_repo in enumerate(dir_list): repo_is_here = os.path.join(ROOTDIR, student_repo) From 12af37681669bb66b53a68082b30c30d79f1d864 Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 25 Jul 2023 10:33:23 +0000 Subject: [PATCH 37/38] add a typed dict so that the set meta has the right types --- marking_and_admin/mark_functions.py | 40 ++++++++++++++++------------- marking_and_admin/marker.py | 21 ++++++++++----- 2 files changed, 37 insertions(+), 24 deletions(-) diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index 69c3128..a84fbcc 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -22,6 +22,7 @@ from google.auth.transport.requests import Request from google_auth_oauthlib.flow import InstalledAppFlow from googleapiclient.discovery import build +from marker import set_meta from pandas import DataFrame, Series @@ -627,7 +628,7 @@ def get_last_commit(row: Series) -> str: # TODO: find out why I was returning a no commits error -def mark_week( +def mark_set( mark_sheet: DataFrame, set_number: int = 1, timeout: int = 10, @@ -666,12 +667,12 @@ def do_the_marking( force_marking=False, marking_spreadsheet_id: str = "16tESt_4BUf-9-oD04suTprkd1O0oEl6WjzflF_avSKY", # 2022 marks_csv: str = "marks.csv", - w1: dict[str, int | bool] = {"timeout": 5, "active": False}, - w2: dict[str, int | bool] = {"timeout": 5, "active": False}, - w3: dict[str, int | bool] = {"timeout": 5, "active": False}, - w4: dict[str, int | bool] = {"timeout": 5, "active": False}, - w5: dict[str, int | bool] = {"timeout": 5, "active": False}, - exam: dict[str, int | bool] = {"timeout": 5, "active": False}, + set_1: set_meta = {"timeout": 5, "active": False}, + set_2: set_meta = {"timeout": 5, "active": False}, + set_3: set_meta = {"timeout": 5, "active": False}, + set_4: set_meta = {"timeout": 5, "active": False}, + set_5: set_meta = {"timeout": 5, "active": False}, + exam: set_meta = {"timeout": 5, "active": False}, test_number_of_students: int = 0, force_repos: list[str] = [], ) -> None: @@ -728,22 +729,22 @@ def do_the_marking( mark_sheet["updated"] = mark_sheet.apply(update_repos, axis=1) mark_sheet["last_commit"] = mark_sheet.apply(get_last_commit, axis=1) - mark_sheet["set1"] = mark_week( - mark_sheet, set_number=1, timeout=w1["timeout"], 
active=w1["active"] + mark_sheet["set1"] = mark_set( + mark_sheet, set_number=1, timeout=set_1["timeout"], active=set_1["active"] ) - mark_sheet["set2"] = mark_week( - mark_sheet, set_number=2, timeout=w2["timeout"], active=w2["active"] + mark_sheet["set2"] = mark_set( + mark_sheet, set_number=2, timeout=set_2["timeout"], active=set_2["active"] ) - mark_sheet["set3"] = mark_week( - mark_sheet, set_number=3, timeout=w3["timeout"], active=w3["active"] + mark_sheet["set3"] = mark_set( + mark_sheet, set_number=3, timeout=set_3["timeout"], active=set_3["active"] ) - mark_sheet["set4"] = mark_week( - mark_sheet, set_number=4, timeout=w4["timeout"], active=w4["active"] + mark_sheet["set4"] = mark_set( + mark_sheet, set_number=4, timeout=set_4["timeout"], active=set_4["active"] ) - mark_sheet["set5"] = mark_week( - mark_sheet, set_number=5, timeout=w5["timeout"], active=w5["active"] + mark_sheet["set5"] = mark_set( + mark_sheet, set_number=5, timeout=set_5["timeout"], active=set_5["active"] ) - mark_sheet["exam"] = mark_week( + mark_sheet["exam"] = mark_set( mark_sheet, set_number=8, timeout=exam["timeout"], active=exam["active"] ) mark_sheet.drop(["name"], axis=1, errors="ignore", inplace=True) @@ -797,6 +798,9 @@ def convert_one_results_dict_to_an_int(results_dict) -> int: convert_one_results_dict_to_an_int ) + mark_sheet["exam_data"] = mark_sheet[f"exam"] + mark_sheet[f"exam"] = mark_sheet[f"exam"].apply(convert_one_results_dict_to_an_int) + def get_student_data(): students = None diff --git a/marking_and_admin/marker.py b/marking_and_admin/marker.py index 2b4c99e..88d7bf3 100644 --- a/marking_and_admin/marker.py +++ b/marking_and_admin/marker.py @@ -6,9 +6,18 @@ """ import os import sys +from typing import TypedDict from mark_functions import do_the_marking + +class set_meta(TypedDict): + """week is just to keep the typechecker happy.""" + + timeout: int + active: bool + + MARKING_SPREADSHEET_ID = os.getenv("GOOGLE_SHEETS_KEY", "") @@ -22,12 +31,12 @@ force_marking=True, marking_spreadsheet_id=MARKING_SPREADSHEET_ID, marks_csv="marking_and_admin/marks.csv", - mark_w1={"timeout":15, "active":True}, - mark_w2={"timeout":15, "active":True}, - mark_w3={"timeout":30, "active":True}, - mark_w4={"timeout":50, "active":True}, - mark_w5={"timeout":50, "active":False}, - mark_exam={"timeout":45, "active":True}, + set_1={"timeout": 15, "active": True}, + set_2={"timeout": 15, "active": True}, + set_3={"timeout": 30, "active": True}, + set_4={"timeout": 50, "active": True}, + set_5={"timeout": 50, "active": False}, + exam={"timeout": 45, "active": True}, test_number_of_students=0, # if more than 0, will only mark a sample of N repos force_repos=["lvl-lim", "JeWang"], ) From 8efa39ab1458f63ec548683e6ed803b44b463b26 Mon Sep 17 00:00:00 2001 From: Ben Date: Wed, 26 Jul 2023 05:10:46 +0000 Subject: [PATCH 38/38] move set meta into its own file --- marking_and_admin/mark_functions.py | 2 +- marking_and_admin/marker.py | 9 --------- marking_and_admin/marking_types.py | 8 ++++++++ 3 files changed, 9 insertions(+), 10 deletions(-) create mode 100644 marking_and_admin/marking_types.py diff --git a/marking_and_admin/mark_functions.py b/marking_and_admin/mark_functions.py index a84fbcc..909aeac 100644 --- a/marking_and_admin/mark_functions.py +++ b/marking_and_admin/mark_functions.py @@ -22,7 +22,7 @@ from google.auth.transport.requests import Request from google_auth_oauthlib.flow import InstalledAppFlow from googleapiclient.discovery import build -from marker import set_meta +from marking_types import set_meta 
from pandas import DataFrame, Series diff --git a/marking_and_admin/marker.py b/marking_and_admin/marker.py index 88d7bf3..96c3795 100644 --- a/marking_and_admin/marker.py +++ b/marking_and_admin/marker.py @@ -6,18 +6,9 @@ """ import os import sys -from typing import TypedDict from mark_functions import do_the_marking - -class set_meta(TypedDict): - """week is just to keep the typechecker happy.""" - - timeout: int - active: bool - - MARKING_SPREADSHEET_ID = os.getenv("GOOGLE_SHEETS_KEY", "") diff --git a/marking_and_admin/marking_types.py b/marking_and_admin/marking_types.py new file mode 100644 index 0000000..7a9dc84 --- /dev/null +++ b/marking_and_admin/marking_types.py @@ -0,0 +1,8 @@ +from typing import TypedDict + + +class set_meta(TypedDict): + """week is just to keep the typechecker happy.""" + + timeout: int + active: bool
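
For context on the last two patches: patch 37 left mark_functions.py importing set_meta from marker.py while marker.py imports do_the_marking from mark_functions.py, so moving the TypedDict into marking_types.py presumably breaks that import cycle as well as giving the per-set metadata a single typed home. Below is a small sketch of how the totally-typed dict is consumed; the TypedDict mirrors marking_types.py, while mark_set_stub is illustrative only, not the repo's mark_set.

    # Sketch: a set_meta-shaped config driving a marking call.
    # The TypedDict mirrors marking_and_admin/marking_types.py;
    # mark_set_stub is a hypothetical stand-in for mark_set.
    from typing import TypedDict


    class set_meta(TypedDict):
        timeout: int
        active: bool


    def mark_set_stub(timeout: int, active: bool) -> int:
        """Return 0 immediately for sets that aren't being marked yet."""
        return timeout if active else 0


    set_1: set_meta = {"timeout": 15, "active": True}
    # mypy rejects {"timeout": 15} on its own (TypedDicts are total by
    # default) and {"timeout": "15", "active": True} (wrong value type).
    print(mark_set_stub(timeout=set_1["timeout"], active=set_1["active"]))
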