Commit a697b1c4 authored by tuhe

Removed bad __main__ block that caused buggy behavior in some cases

parent 193eeaef
Metadata-Version: 2.1
Name: unitgrade
Version: 0.1.30.9
Version: 0.1.30.15
Summary: A student homework/exam evaluation framework built on Python's unittest framework.
Home-page: https://lab.compute.dtu.dk/tuhe/unitgrade
Author: Tue Herlau
......
from threading import Thread
import importnb
import numpy as np
import sys
@@ -16,6 +17,7 @@ from unittest.case import TestCase
from unitgrade.runners import UTextResult
from unitgrade.utils import gprint, Capturing2, Capturing
from unitgrade.artifacts import StdCapturing
from unitgrade.utils import DKPupDB
colorama.init(autoreset=True) # auto resets your settings after every output
@@ -25,7 +27,6 @@ def setup_dir_by_class(C, base_dir):
name = C.__class__.__name__
return base_dir, name
_DASHBOARD_COMPLETED_MESSAGE = "Dashboard> Evaluation completed."
# Consolidate this code.
@@ -35,7 +36,6 @@ class classmethod_dashboard(classmethod):
if not cls._generate_artifacts:
f(cls)
return
from unitgrade.utils import DKPupDB
db = DKPupDB(cls._artifact_file_for_setUpClass())
r = np.random.randint(1000 * 1000)
db.set('run_id', r)
@@ -71,6 +71,7 @@ class classmethod_dashboard(classmethod):
sys.stdout = _stdout
sys.stderr = _stderr
std_capture.close()
super().__init__(dashboard_wrap)
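The classmethod_dashboard decorator above relies on a small but non-obvious trick: subclassing classmethod and handing a wrapper function to super().__init__. A minimal sketch of the same pattern, with illustrative names (classmethod_logging and MyTests are not part of the library):

import unittest

class classmethod_logging(classmethod):
    # Same decorator pattern as classmethod_dashboard above: wrap the decorated
    # function, then pass the wrapper on to classmethod.__init__.
    def __init__(self, f):
        def wrap(cls):
            print("setUpClass starting for", cls.__name__)  # stand-in for the dashboard/DB bookkeeping
            f(cls)
        super().__init__(wrap)

class MyTests(unittest.TestCase):
    @classmethod_logging
    def setUpClass(cls):
        cls.resource = 42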
class Report:
@@ -103,8 +104,10 @@ class Report:
return inspect.getfile(type(self))
def _artifact_file(self):
""" File for the artifacts DB (thread safe). This file is optinal. Note that it is a pupdb database file.
Note the file is shared between all sub-questions. """
""" File for the artifacts DB (thread safe). This file is optional. Note that it is a pupdb database file.
Note the file is shared between all sub-questions.
TODO: Describe what is actually in this file.
"""
return os.path.join(os.path.dirname(self._file()), "unitgrade_data/main_config_"+ os.path.basename(self._file()[:-3]) + ".artifacts.pkl")
def _manifest_file(self):
@@ -112,6 +115,7 @@ class Report:
The manifest is the file we append all artifact hashes to so we can check results at some later time.
The file is plaintext and can be deleted.
"""
# print("_file", self._file())
return os.path.join(os.path.dirname(self._file()), "unitgrade_data/token_" + os.path.basename(self._file()[:-3]) + ".manifest")
def _is_run_in_grade_mode(self):
@@ -162,9 +166,17 @@ class Report:
q._setup_answers_mode = True
# q._generate_artifacts = False # Disable artifact generation when the report is being set up.
# Ensure that the lock file exists.
if self.url is not None:
if not os.path.dirname(self._get_remote_lock_file()):
os.makedirs(os.path.dirname(self._get_remote_lock_file()))
if not os.path.isdir(d_ := os.path.dirname(self._get_remote_lock_file())):
os.makedirs(d_)
with open(self._get_remote_lock_file(), 'w') as f:
f.write("If this file is present, we will not synchronize this directory with a remote (using report.url).\nThis is a very good idea during development, but the lock-file should be disabled (in gitignore) for the students.")
from unitgrade import evaluate_report_student
evaluate_report_student(self, unmute=verbose, noprogress=not verbose, generate_artifacts=False) # Disable artifact generation.
# self.main() # Run all tests in class just to get that out of the way...
report_cache = {}
for q, _ in self.questions:
@@ -189,17 +201,36 @@ class Report:
q._cache = payloads[q.__qualname__]
self._config = payloads['config']
def _get_remote_lock_file(self):
return os.path.join(os.path.dirname( self._artifact_file() ), "dont_check_remote.lock")
def _check_remote_versions(self):
if self.url is None:
if self.url is None: # No url, no problem.
return
if os.path.isfile(self._get_remote_lock_file() ):
print("Since the file", self._get_remote_lock_file(), "was present I will not compare the files on this computer with git")
print("i.e., I am assuming this is being run on the teachers computer. Remember to put the file in .gitignore for the students!")
return
if self._file().endswith("_complete.py"):
print("Unitgrade> You are trying to check the remote version of a *_tests_complete.py-file, and you will potentially overwrite part of this file.")
print("Unitgrade> Please add a unitgrade_data/note_remote_check.lock - file to this directory (and put it in the .gitignore) to avoid data loss.")
print(self._file())
raise Exception("Unitgrade> You are trying to check the remote version of a *_tests_complete.py-file, and you will potentially overwrite part of this file.")
print("CHECKING THE REMOTE VERSION. ")
url = self.url
if not url.endswith("/"):
url += "/"
snapshot_file = os.path.dirname(self._file()) + "/unitgrade_data/.snapshot"
if os.path.isfile(snapshot_file):
with open(snapshot_file, 'r') as f:
t = f.read()
if (time.time() - float(t)) < self._remote_check_cooldown_seconds:
db = DKPupDB("check_on_remote")
if 'last_check_time' in db:
# with open(snapshot_file, 'r') as f:
t = db['last_check_time']
if (time.time() - t) < self._remote_check_cooldown_seconds:
return
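The snapshot-file cooldown is replaced above by a last_check_time entry in a DKPupDB; the guard reduces to a few lines. A minimal sketch, assuming DKPupDB supports the dict-style membership and item access used in the diff:

import time

def should_check_remote(db, cooldown_seconds):
    # Illustrative only: skip the slow network round-trip if a check ran recently.
    if "last_check_time" in db and (time.time() - db["last_check_time"]) < cooldown_seconds:
        return False
    return True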
if self.url.startswith("https://gitlab"):
@@ -207,11 +238,13 @@ class Report:
# "https://gitlab.compute.dtu.dk/tuhe/unitgrade_private/-/raw/master/examples/autolab_example_py_upload/instructor/cs102_autolab/report2_test.py?inline=false"
# url = self.url
url = url.replace("-/tree", "-/raw")
url = url.replace("-/blob", "-/raw")
# print(url)
# "https://gitlab.compute.dtu.dk/tuhe/unitgrade_private/-/tree/master/examples/autolab_example_py_upload/instructor/cs102_autolab"
# "https://gitlab.compute.dtu.dk/tuhe/unitgrade_private/-/raw/master/examples/autolab_example_py_upload/instructor/report2_test.py?inline=false"
# "https://gitlab.compute.dtu.dk/tuhe/unitgrade_private/-/raw/master/examples/autolab_example_py_upload/instructor/cs102_autolab/report2_test.py?inline=false"
raw_url = urllib.parse.urljoin(url, os.path.basename(self._file()) + "?inline=false")
# raw_url = url
# print("Is this file run in local mode?", self._is_run_in_grade_mode())
if self._is_run_in_grade_mode():
remote_source = requests.get(raw_url).text
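As an aside, the URL handling above boils down to rewriting a GitLab tree/blob URL into a raw-file URL and appending ?inline=false; the small example below reuses the URLs from the comments (output shown for orientation only):

import urllib.parse

url = "https://gitlab.compute.dtu.dk/tuhe/unitgrade_private/-/tree/master/examples/autolab_example_py_upload/instructor/cs102_autolab/"
url = url.replace("-/tree", "-/raw").replace("-/blob", "-/raw")
raw_url = urllib.parse.urljoin(url, "report2_test.py" + "?inline=false")
# raw_url == "https://gitlab.compute.dtu.dk/tuhe/unitgrade_private/-/raw/master/examples/autolab_example_py_upload/instructor/cs102_autolab/report2_test.py?inline=false"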
@@ -220,62 +253,73 @@ class Report:
if local_source != remote_source:
print("\nThe local version of this report is not identical to the remote version which can be found at")
print(self.url)
print("The most likely reason for this is that the remote version was updated by the teacher due to some issue.")
print("You should check if there was an announcement and update the test to the most recent version; most likely")
print("This can be done by running the command")
print("> git pull")
print("The most likely reason for this is that the remote version was updated by the teacher due to an issue.")
print("You can find the most recent code here:")
print(self.url)
raise Exception(f"Version of grade script does not match the remote version. Please update using git pull")
raise Exception(f"Version of grade script does not match the remote version. Please update your grade script.")
else:
text = requests.get(raw_url).text
node = ast.parse(text)
classes = [n for n in node.body if isinstance(n, ast.ClassDef) if n.name == self.__class__.__name__][0]
version_remote = None
for b in classes.body:
# print(b.)
if b.targets[0].id == "version":
# print(b)
# print(b.value)
version_remote = b.value.value
break
if version_remote != self.version:
print("\nThe version of this report", self.version, "does not match the version of the report on git", version_remote)
if version_remote is not None and version_remote != self.version:
print("\nThe version of this report", self.version, "does not match the version of the report on git:", version_remote)
print("The most likely reason for this is that the remote version was updated by the teacher due to some issue.")
print("You should check if there was an announcement and update the test to the most recent version; most likely")
print("This can be done by running the command")
print("> git pull")
print("You can find the most recent code here:")
print("What I am going to do is to download the correct version from git so you are up to date. ")
print("You should check if there was an announcement and update the test to the most recent version. This can be done by downloading the files in")
print(self.url)
raise Exception(f"Version of test on remote is {version_remote}, which is different than this version of the test {self.version}. Please update your test to the most recent version.")
print("and putting them in the corresponding folder on your computer.")
with open(self._file(), "w") as f:
f.write(text)
raise Exception(f"Version of test on remote is {version_remote}, which is different than this version of the test {self.version}. I have manually updated your tests.")
for (q, _) in self.questions:
if issubclass(q, UTestCase):
qq = q(skip_remote_check=True)
cfile = q._cache_file()
if not os.path.isdir(d_ := os.path.dirname(cfile)):
os.makedirs(d_) # The unitgrade_data directory does not exist so we create it.
relpath = os.path.relpath(cfile, os.path.dirname(self._file()))
relpath = relpath.replace("\\", "/")
raw_url = urllib.parse.urljoin(url, relpath + "?inline=false")
# requests.get(raw_url)
if os.path.isfile(cfile):
with open(cfile, 'rb') as f:
b1 = f.read()
else:
b1 = bytes() # No pkl file exists. We set it to the empty string.
b2 = requests.get(raw_url).content
if b1 != b2:
print("\nQuestion ", qq.title, "relies on the data file", cfile)
print("However, it appears that this file is missing or in a different version than the most recent found here:")
print(self.url)
print("The most likely reason for this is that the remote version was updated by the teacher due to some issue.")
print("You should check if there was an announcement and update the test to the most recent version; most likely")
print("This can be done by simply running the command")
print("> git pull")
print("to avoid running bad tests against good code, the program will now stop. Please update and good luck!")
raise Exception("The data file for the question", qq.title, "did not match remote source found on git. The test will therefore automatically fail. Please update your test/data files.")
t = time.time()
print("The most likely reason for this is that the remote version was updated by the teacher.")
print("I will now try to download the file automatically, WCGW?")
with open(cfile, 'wb') as f:
f.write(b2)
print("Local data file updated successfully.")
# print("You should check if there was an announcement and update the test to the most recent version; most likely")
# print("This can be done by simply running the command")
# print("> git pull")
# print("to avoid running bad tests against good code, the program will now stop. Please update and good luck!")
# raise Exception("The data file for the question", qq.title, "did not match remote source found on git. The test will therefore automatically fail. Please update your test/data files.")
# t = time.time()
if os.path.isdir(os.path.dirname(self._file()) + "/unitgrade_data"):
with open(snapshot_file, 'w') as f:
f.write(f"{t}")
db['last_check_time'] = time.time()
# with open(snapshot_file, 'w') as f:
# f.write(f"{t}")
def get_hints(ss):
""" Extract all blocks of the forms:
@@ -305,21 +349,7 @@ def get_hints(ss):
except Exception as e:
print("bad hints", ss, e)
from threading import Thread
class WandUpload(Thread):
# - What do you want to know? What might be of help?
# - What errors occur
# - How many times each test is run, and how many times it fails
# - What kind of errors occur in the tests
# - timestamps
# For each test, track the number of runs and the different errors
# For each test, track which errors many students have in common.
def run(self):
pass
pass
class UTestCase(unittest.TestCase):
# a = 234
@@ -363,10 +393,10 @@ class UTestCase(unittest.TestCase):
if not self._generate_artifacts:
return super().run(result)
print(result)
# print(result)
mute = False
if isinstance(result, UTextResult):
print(result.show_errors_in_grade_mode)
# print(result.show_errors_in_grade_mode)
mute = not result.show_errors_in_grade_mode
else:
pass
@@ -376,7 +406,7 @@ class UTestCase(unittest.TestCase):
from unitgrade.utils import DKPupDB
self._error_fed_during_run = [] # Initialize this to be empty.
db = DKPupDB(self._artifact_file(), register_ephemeral=True)
db = DKPupDB(self._testcase_artifact_file(), register_ephemeral=True)
db.set("state", "running")
db.set('run_id', np.random.randint(1000*1000))
db.set('coverage_files_changed', None)
@@ -390,13 +420,9 @@ class UTestCase(unittest.TestCase):
try:
# Run this unittest and record all of the output.
# This is probably where we should hijack the stdout output and save it -- after all, this is where the test is actually run.
# sys.stdout = stdout_capture
sys.stderr = std_capture.dummy_stderr
sys.stdout = std_capture.dummy_stdout
# db.get('stdout')
# db.get('stderr')
# db.get("history")
result_ = TestCase.run(self, result)
from werkzeug.debug.tbtools import DebugTraceback, _process_traceback
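The hijack mentioned in the comment above is a plain redirect-and-restore of sys.stdout/sys.stderr around the underlying unittest run; a sketch of that pattern, assuming std_capture exposes the dummy_stdout/dummy_stderr objects used in the diff (the real code restores the streams later, after the dashboard bookkeeping):

import sys
from unittest import TestCase

def run_with_capture(test, result, std_capture):
    # Illustrative only: route output through the capture object while the test runs.
    _stdout, _stderr = sys.stdout, sys.stderr
    try:
        sys.stdout = std_capture.dummy_stdout
        sys.stderr = std_capture.dummy_stderr
        return TestCase.run(test, result)
    finally:
        sys.stdout, sys.stderr = _stdout, _stderr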
@@ -462,7 +488,7 @@ class UTestCase(unittest.TestCase):
data = self.cov.get_data()
base, _, _ = self._report._import_base_relative()
for file in data.measured_files():
print(file)
# print(file)
file = os.path.normpath(file)
root = Path(base)
child = Path(file)
@@ -486,7 +512,7 @@ class UTestCase(unittest.TestCase):
l = ll-1
# print(l, lines2[l])
if l < len(lines2) and lines2[l].strip() == garb:
print("Got one.")
# print("Got one.")
rel = os.path.relpath(child, root)
cc = self._covcache
j = 0
@@ -500,7 +526,7 @@ class UTestCase(unittest.TestCase):
if rel not in cc:
cc[rel] = {}
cc[rel][fun] = (l, "\n".join(comments))
print("found", rel, fun)
# print("found", rel, fun)
# print(file, ll)
self._cache_put((self.cache_id(), 'coverage'), self._covcache)
@@ -552,14 +578,10 @@ class UTestCase(unittest.TestCase):
self._load_cache()
self._assert_cache_index = 0
# Perhaps do a sanity check here to see if the cache is up to date? To do that, we must make sure the
# cache exists locally.
# Find the report class this class is defined within.
if skip_remote_check:
return
import importlib, inspect
found_reports = []
good_module_name = self.__module__
try:
importlib.import_module(good_module_name)
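The surrounding code locates the Report subclass whose module defines this test; the discovery loop itself falls outside the shown hunk, but it amounts to importing the test's module and scanning it for Report subclasses. A hedged sketch of that idea (the helper name is illustrative, and Report is the class defined earlier in this file):

import importlib
import inspect

def find_reports(module_name):
    # Illustrative only: collect Report subclasses defined in (or imported into) the module.
    mod = importlib.import_module(module_name)
    return [obj for _, obj in inspect.getmembers(mod, inspect.isclass)
            if issubclass(obj, Report) and obj is not Report]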
@@ -580,8 +602,14 @@ class UTestCase(unittest.TestCase):
raise Exception("This question is a member of multiple reports. That should not be the case -- don't get too creative.")
if len(found_reports) > 0:
report = found_reports[0]
report()._check_remote_versions()
try:
r_ = report()
if not r_._is_run_in_grade_mode(): # Disable url request handling during evaluation.
r_._check_remote_versions()
except Exception as e:
print("Unitgrade> Warning, I tried to compare with the remote source for this report but was unable to do so.")
print(e)
print("Unitgrade> The exception was", e)
def _ensure_cache_exists(self):
if not hasattr(self.__class__, '_cache') or self.__class__._cache == None:
@@ -721,9 +749,9 @@ class UTestCase(unittest.TestCase):
# cf = os.path.dirname(inspect.getabsfile(cls)) + "/unitgrade_data/" + cls.__name__
return file
def _artifact_file(self):
""" File for the artifacts DB (thread safe). This file is optinal.
Note the file is shared between all sub-questions. """
def _testcase_artifact_file(self):
""" As best as I can tell, this file is only used as an index (key) in the db. For historical reasons it is formatted as a .json
but the file will not actually be written to. """
return os.path.join(os.path.dirname(self.__class__._cache_file()), '-'.join(self.cache_id()) + ".json")
def _save_cache(self):
......
@@ -287,6 +287,8 @@ def hash_string(s):
# gfg.update(s.encode("utf-8"))
# return gfg.digest()
def hash2url(hash):
return hash[:16]
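hash2url simply truncates the long token hash to the 16-character segment used in the published results URL; for example, applied to the hash that appeared in the old __main__ block at the bottom of this file:

h = "7720b41ab925098956c7db37c8292ce3a7b4ded96f4442234dee493c021fc5f7294e543de78630aaf873b756d25bf7b4fd7eb6e66cec282b54f0c35b83e9071f"
hash2url(h)   # -> "7720b41ab9250989"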
def picklestring2dict(picklestr):
""" Reverse of the above method: Turns the string back into a dictionary. """
@@ -335,6 +337,8 @@ def checkout_remote_results(remote_url, manifest):
SEP = "-----------"
remote = {ll[0]: ll[1] for ll in [l.strip().split(" ") for l in html.split(SEP)[1].strip().splitlines()] }
# lines =
# print(remote_url)
# print(remote)
mf = [m.strip().split(" ")[-1] for m in manifest.strip().splitlines()]
a = 23
@@ -342,7 +346,7 @@ def checkout_remote_results(remote_url, manifest):
url = None
for hash in reversed(mf):
if hash_string(hash) in remote:
url = f"{remote_url}/{os.path.dirname( remote[hash_string(hash)] )}/{hash}/index.html"
url = f"{remote_url}/{os.path.dirname( remote[hash_string(hash)] )}/{hash2url(hash)}/index.html"
with urllib.request.urlopen(url) as response:
html = response.read().decode()
# print( html )
@@ -354,7 +358,6 @@ def checkout_remote_results(remote_url, manifest):
df = dfs[0]
# df.__format__()
# tabular
# print( df.to_string(index=False) )
# df.as
result = dict(html=html, df=df, score=float( df.iloc[2].to_list()[-1] ), url=url)
@@ -430,10 +433,10 @@ class DKPupDB:
return item in self.dk[self.name_] #keys()
# return item in self.dk
if __name__ == "__main__":
url = "https://cp.pages.compute.dtu.dk/02002public/_static/evaluation/"
manifest = """
/home/tuhe/Documents/unitgrade_private/src/unitgrade_private/pipelines/tmp/students/cp/project0/Project0_handin_0_of_10.token 7720b41ab925098956c7db37c8292ce3a7b4ded96f4442234dee493c021fc5f7294e543de78630aaf873b756d25bf7b4fd7eb6e66cec282b54f0c35b83e9071f
"""
checkout_remote_results(url, manifest = manifest)
# if __name__ == "__main__":
# url = "https://cp.pages.compute.dtu.dk/02002public/_static/evaluation/"
# manifest = """
# /home/tuhe/Documents/unitgrade_private/src/unitgrade_private/pipelines/tmp/students/cp/project0/Project0_handin_0_of_10.token 7720b41ab925098956c7db37c8292ce3a7b4ded96f4442234dee493c021fc5f7294e543de78630aaf873b756d25bf7b4fd7eb6e66cec282b54f0c35b83e9071f
# """
# # checkout_remote_results(url, manifest = manifest)
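With the module-level __main__ block gone (it caused buggy behavior when this module was executed directly, per the commit message), an equivalent manual check can live in a separate scratch script. A hedged sketch, reusing the URL from the commented-out block; the manifest file name is a placeholder, and the import path assumes this module is unitgrade.utils:

# scratch_check.py -- not part of the package; run by hand when needed.
from unitgrade.utils import checkout_remote_results   # assumed import path

url = "https://cp.pages.compute.dtu.dk/02002public/_static/evaluation/"
with open("Project0_handin_0_of_10.manifest") as f:   # hypothetical manifest file
    manifest = f.read()
result = checkout_remote_results(url, manifest=manifest)
print(result["score"], result["url"])                 # keys taken from the result dict above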
__version__ = "0.1.30.9"
__version__ = "0.1.30.15"