Commit d5b111a0 authored by tuhe

Docker build and download updates

parent 22f78c15
setup.py
@@ -32,5 +32,5 @@ setuptools.setup(
    packages=setuptools.find_packages(where="src"),
    python_requires=">=3.8",
    license="MIT",
-   install_requires=['numpy', 'tabulate', "pyfiglet", "coverage", "colorama", 'tqdm'],
+   install_requires=['numpy', 'tabulate', "pyfiglet", "coverage", "colorama", 'tqdm', 'importnb'],
)
src/unitgrade.egg-info/PKG-INFO
Metadata-Version: 2.1
Name: unitgrade
-Version: 0.1.14
+Version: 0.1.16
Summary: A student homework/exam evaluation framework build on pythons unittest framework.
Home-page: https://lab.compute.dtu.dk/tuhe/unitgrade
Author: Tue Herlau
src/unitgrade.egg-info/SOURCES.txt
@@ -5,6 +5,7 @@ setup.py
src/unitgrade/__init__.py
src/unitgrade/evaluate.py
src/unitgrade/framework.py
+src/unitgrade/runners.py
src/unitgrade/utils.py
src/unitgrade/version.py
src/unitgrade.egg-info/PKG-INFO
src/unitgrade.egg-info/requires.txt
@@ -4,3 +4,4 @@ pyfiglet
coverage
colorama
tqdm
+importnb
src/unitgrade/__init__.py
from unitgrade.version import __version__
from unitgrade.utils import myround, msum, mfloor, Capturing, ActiveProgress, cache, hide
# from unitgrade import hide
-from unitgrade.framework import Report, UTestCase
+from unitgrade.framework import Report, UTestCase, NotebookTestCase
from unitgrade.evaluate import evaluate_report_student
# from unitgrade import utils
# import os
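The package's public surface after this change is Report, UTestCase, the new NotebookTestCase, evaluate_report_student and a few utilities. For orientation, a report script typically looks like the sketch below; it follows common unitgrade usage rather than anything in this commit, and cs101, homework1 and add() are placeholder names.

# report1.py -- hypothetical example; cs101/homework1/add are placeholders, and the
# Report attributes (title, questions, pack_imports) follow typical unitgrade usage,
# not something introduced by this diff.
from unitgrade import Report, UTestCase, evaluate_report_student
import cs101  # placeholder for the package containing the student's code

class Week1(UTestCase):
    """ Week 1: basic arithmetic """
    def test_add(self):
        """ add() sums two numbers """  # shortDescription() becomes the printed item title
        from cs101.homework1 import add
        self.assertEqual(add(2, 2), 4)

class Report1(Report):
    title = "CS 101 Report 1"
    questions = [(Week1, 10)]   # (test class, point weight) -- assumed convention
    pack_imports = [cs101]      # modules whose sources are bundled with the report -- assumed

if __name__ == "__main__":
    evaluate_report_student(Report1())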
src/unitgrade/evaluate.py
@@ -7,7 +7,7 @@ from datetime import datetime
import pyfiglet
from unitgrade import msum
import unittest
-from unitgrade.framework import UTextResult
+from unitgrade.runners import UTextResult, UTextTestRunner
import inspect
import os
import argparse
@@ -141,7 +141,6 @@ def evaluate_report(report, question=None, qitem=None, passall=False, verbose=Fa
        q.possible = 0
        q.obtained = 0
        # q_ = {}  # Gather score in this class.
-       from unitgrade.framework import UTextTestRunner
        UTextResult.q_title_print = q_title_print  # Hacky
        UTextResult.show_progress_bar = show_progress_bar  # Hacky.
        UTextResult.number = n
src/unitgrade/framework.py
+import importnb
import numpy as np
import sys
import pickle
import os
-import io
-from unittest.runner import _WritelnDecorator
import inspect
import colorama
import unittest
import time
import textwrap
-from unitgrade import ActiveProgress
+from unitgrade.runners import UTextResult
-from unitgrade.utils import gprint, Capturing2
+from unitgrade.utils import gprint, Capturing2, Capturing
colorama.init(autoreset=True)  # auto resets your settings after every output

def setup_dir_by_class(C, base_dir):
@@ -102,152 +101,6 @@ class Report:
        self._config = payloads['config']
class UTextResult(unittest.TextTestResult):
    nL = 80
    number = -1  # HAcky way to set question number.
    show_progress_bar = True
    unmute = False  # Whether to redirect stdout.
    cc = None
    setUpClass_time = 3  # Estimated time to run setUpClass in TestCase. Must be set externally. See key (("ClassName", "setUpClass"), "time") in _cache.

    def __init__(self, stream, descriptions, verbosity):
        super().__init__(stream, descriptions, verbosity)
        self.successes = []

    def printErrors(self) -> None:
        # TODO: Fix here. probably also needs to flush stdout.
        self.printErrorList('ERROR', self.errors)
        self.printErrorList('FAIL', self.failures)

    def addError(self, test, err):
        super(unittest.TextTestResult, self).addError(test, err)
        err = self.errors[-1][1]
        stdout = sys.stdout.log.readlines()  # Only works because we set sys.stdout to a unitgrade.Logger
        self.errors[-1] = (self.errors[-1][0], {'return': None,
                                                'stderr': err,
                                                'stdout': stdout
                                                })
        self.cc_terminate(success=False)

    def addFailure(self, test, err):
        super(unittest.TextTestResult, self).addFailure(test, err)
        err = self.failures[-1][1]
        stdout = sys.stdout.log.readlines()  # Only works because we set sys.stdout to a unitgrade.Logger
        self.failures[-1] = (self.failures[-1][0], {'return': None,
                                                    'stderr': err,
                                                    'stdout': stdout
                                                    })
        self.cc_terminate(success=False)

    def addSuccess(self, test: unittest.case.TestCase) -> None:
        msg = None
        stdout = sys.stdout.log.readlines()  # Only works because we set sys.stdout to a unitgrade.Logger
        if hasattr(test, '_get_outcome'):
            o = test._get_outcome()
            if isinstance(o, dict):
                key = (test.cache_id(), "return")
                if key in o:
                    msg = test._get_outcome()[key]
        # print(sys.stdout.readlines())
        self.successes.append((test, None))  # (test, message) (to be consistent with failures and errors).
        self.successes[-1] = (self.successes[-1][0], {'return': msg,
                                                      'stdout': stdout,
                                                      'stderr': None})
        self.cc_terminate()

    def cc_terminate(self, success=True):
        if self.show_progress_bar or True:
            tsecs = np.round(self.cc.terminate(), 2)
            self.cc.file.flush()
            ss = self.item_title_print
            state = "PASS" if success else "FAILED"
            dot_parts = ('.' * max(0, self.nL - len(state) - len(ss)))
            if self.show_progress_bar or True:
                print(self.item_title_print + dot_parts, end="", file=self.cc.file)
            else:
                print(dot_parts, end="", file=self.cc.file)
            if tsecs >= 0.5:
                state += " (" + str(tsecs) + " seconds)"
            print(state, file=self.cc.file)

    def startTest(self, test):
        name = test.__class__.__name__
        if self.testsRun == 0 and hasattr(test.__class__, '_cache2'):  # Disable this if the class is pure unittest.TestCase
            # This is the first time we are running a test. i.e. we can time the time taken to call setupClass.
            if test.__class__._cache2 is None:
                test.__class__._cache2 = {}
            test.__class__._cache2[((name, 'setUpClass'), 'time')] = time.time() - self.t_start
        self.testsRun += 1
        item_title = test.shortDescription()  # Better for printing (get from cache).
        if item_title == None:
            # For unittest framework where getDescription may return None.
            item_title = self.getDescription(test)
        self.item_title_print = " * q%i.%i) %s" % (UTextResult.number + 1, self.testsRun, item_title)
        if self.show_progress_bar or True:
            estimated_time = test.__class__._cache.get(((name, test._testMethodName), 'time'), 100) if hasattr(test.__class__, '_cache') else 4
            self.cc = ActiveProgress(t=estimated_time, title=self.item_title_print, show_progress_bar=self.show_progress_bar, file=sys.stdout)
        else:
            print(self.item_title_print + ('.' * max(0, self.nL - 4 - len(self.item_title_print))), end="")
        self._test = test
        # if not self.unmute:
        self._stdout = sys.stdout  # Redundant. remove later.
        from unitgrade.utils import Logger
        sys.stdout = Logger(io.StringIO(), write_to_stdout=self.unmute)

    def stopTest(self, test):
        # if not self.unmute:
        buff = sys.stdout.log
        sys.stdout = self._stdout  # redundant.
        buff.close()
        from unitgrade.utils import Logger
        super().stopTest(test)

    def _setupStdout(self):
        if self._previousTestClass == None:
            self.t_start = time.time()
            if hasattr(self.__class__, 'q_title_print'):
                q_title_print = self.__class__.q_title_print
            else:
                q_title_print = "<unnamed test. See unitgrade.framework.py>"
            cc = ActiveProgress(t=self.setUpClass_time, title=q_title_print, show_progress_bar=self.show_progress_bar)
            self.cc = cc

    def _restoreStdout(self):  # Used when setting up the test.
        if self._previousTestClass is None:
            q_time = self.cc.terminate()
            q_time = np.round(q_time, 2)
            sys.stdout.flush()
            if self.show_progress_bar:
                print(self.cc.title, end="")
            print(" " * max(0, self.nL - len(self.cc.title)) + (" (" + str(q_time) + " seconds)" if q_time >= 0.5 else ""))


class UTextTestRunner(unittest.TextTestRunner):
    def __init__(self, *args, **kwargs):
        stream = io.StringIO()
        super().__init__(*args, stream=stream, **kwargs)

    def _makeResult(self):
        # stream = self.stream # not you!
        stream = sys.stdout
        stream = _WritelnDecorator(stream)
        return self.resultclass(stream, self.descriptions, self.verbosity)
def get_hints(ss):
    if ss == None:
        return None
@@ -302,8 +155,6 @@ class UTestCase(unittest.TestCase):
        cls._cache = None
        cls._cache2 = None

    def _callSetUp(self):
        if self._with_coverage:
            if self._covcache is None:
@@ -592,3 +443,14 @@ class UTestCase(unittest.TestCase):
        super().startTestRun()
# 817, 705
class NotebookTestCase(UTestCase):
    notebook = None
    _nb = None

    @classmethod
    def setUpClass(cls) -> None:
        with Capturing():
            cls._nb = importnb.Notebook.load(cls.notebook)

    @property
    def nb(self):
        return self.__class__._nb
\ No newline at end of file
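For context, NotebookTestCase loads the notebook named by its notebook attribute once per class (with its output silenced by Capturing) and exposes the resulting module-like object through the nb property. A hypothetical question class could use it as follows; the notebook file homework1.ipynb and the function my_solution() it is assumed to define are illustrative only.

# Hypothetical usage sketch of the new NotebookTestCase; the notebook path and
# my_solution() are assumptions for illustration, not part of this commit.
from unitgrade import NotebookTestCase

class Week1Notebook(NotebookTestCase):
    notebook = "homework1.ipynb"  # loaded once in setUpClass via importnb.Notebook.load

    def test_my_solution(self):
        """ my_solution() from the notebook returns the expected value """
        # self.nb behaves like an imported module: top-level defs and variables
        # in the notebook are available as attributes.
        self.assertEqual(self.nb.my_solution(2, 2), 4)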
src/unitgrade/runners.py (new file)
import io
import sys
import time
import unittest
from unittest.runner import _WritelnDecorator
import numpy as np
from unitgrade import ActiveProgress


class UTextResult(unittest.TextTestResult):
    nL = 80
    number = -1  # HAcky way to set question number.
    show_progress_bar = True
    unmute = False  # Whether to redirect stdout.
    cc = None
    setUpClass_time = 3  # Estimated time to run setUpClass in TestCase. Must be set externally. See key (("ClassName", "setUpClass"), "time") in _cache.

    def __init__(self, stream, descriptions, verbosity):
        super().__init__(stream, descriptions, verbosity)
        self.successes = []

    def printErrors(self) -> None:
        # TODO: Fix here. probably also needs to flush stdout.
        self.printErrorList('ERROR', [(test, res['stderr']) for test, res in self.errors])
        self.printErrorList('FAIL', [(test, res['stderr']) for test, res in self.failures])

    def addError(self, test, err):
        super(unittest.TextTestResult, self).addError(test, err)
        err = self.errors[-1][1]
        stdout = sys.stdout.log.readlines()  # Only works because we set sys.stdout to a unitgrade.Logger
        self.errors[-1] = (self.errors[-1][0], {'return': None,
                                                'stderr': err,
                                                'stdout': stdout
                                                })
        self.cc_terminate(success=False)

    def addFailure(self, test, err):
        super(unittest.TextTestResult, self).addFailure(test, err)
        err = self.failures[-1][1]
        stdout = sys.stdout.log.readlines()  # Only works because we set sys.stdout to a unitgrade.Logger
        self.failures[-1] = (self.failures[-1][0], {'return': None,
                                                    'stderr': err,
                                                    'stdout': stdout
                                                    })
        self.cc_terminate(success=False)

    def addSuccess(self, test: unittest.case.TestCase) -> None:
        msg = None
        stdout = sys.stdout.log.readlines()  # Only works because we set sys.stdout to a unitgrade.Logger
        if hasattr(test, '_get_outcome'):
            o = test._get_outcome()
            if isinstance(o, dict):
                key = (test.cache_id(), "return")
                if key in o:
                    msg = test._get_outcome()[key]
        # print(sys.stdout.readlines())
        self.successes.append((test, None))  # (test, message) (to be consistent with failures and errors).
        self.successes[-1] = (self.successes[-1][0], {'return': msg,
                                                      'stdout': stdout,
                                                      'stderr': None})
        self.cc_terminate()

    def cc_terminate(self, success=True):
        if self.show_progress_bar or True:
            tsecs = np.round(self.cc.terminate(), 2)
            self.cc.file.flush()
            ss = self.item_title_print
            state = "PASS" if success else "FAILED"
            dot_parts = ('.' * max(0, self.nL - len(state) - len(ss)))
            if self.show_progress_bar or True:
                print(self.item_title_print + dot_parts, end="", file=self.cc.file)
            else:
                print(dot_parts, end="", file=self.cc.file)
            if tsecs >= 0.5:
                state += " (" + str(tsecs) + " seconds)"
            print(state, file=self.cc.file)

    def startTest(self, test):
        name = test.__class__.__name__
        if self.testsRun == 0 and hasattr(test.__class__, '_cache2'):  # Disable this if the class is pure unittest.TestCase
            # This is the first time we are running a test. i.e. we can time the time taken to call setupClass.
            if test.__class__._cache2 is None:
                test.__class__._cache2 = {}
            test.__class__._cache2[((name, 'setUpClass'), 'time')] = time.time() - self.t_start
        self.testsRun += 1
        item_title = test.shortDescription()  # Better for printing (get from cache).
        if item_title == None:
            # For unittest framework where getDescription may return None.
            item_title = self.getDescription(test)
        self.item_title_print = " * q%i.%i) %s" % (UTextResult.number + 1, self.testsRun, item_title)
        if self.show_progress_bar or True:
            estimated_time = test.__class__._cache.get(((name, test._testMethodName), 'time'), 100) if hasattr(test.__class__, '_cache') else 4
            self.cc = ActiveProgress(t=estimated_time, title=self.item_title_print, show_progress_bar=self.show_progress_bar, file=sys.stdout)
        else:
            print(self.item_title_print + ('.' * max(0, self.nL - 4 - len(self.item_title_print))), end="")
        self._test = test
        # if not self.unmute:
        self._stdout = sys.stdout  # Redundant. remove later.
        from unitgrade.utils import Logger
        sys.stdout = Logger(io.StringIO(), write_to_stdout=self.unmute)

    def stopTest(self, test):
        # if not self.unmute:
        buff = sys.stdout.log
        sys.stdout = self._stdout  # redundant.
        buff.close()
        from unitgrade.utils import Logger
        super().stopTest(test)

    def _setupStdout(self):
        if self._previousTestClass == None:
            self.t_start = time.time()
            if hasattr(self.__class__, 'q_title_print'):
                q_title_print = self.__class__.q_title_print
            else:
                q_title_print = "<unnamed test. See unitgrade.framework.py>"
            cc = ActiveProgress(t=self.setUpClass_time, title=q_title_print, show_progress_bar=self.show_progress_bar)
            self.cc = cc

    def _restoreStdout(self):  # Used when setting up the test.
        if self._previousTestClass is None:
            q_time = self.cc.terminate()
            q_time = np.round(q_time, 2)
            sys.stdout.flush()
            if self.show_progress_bar:
                print(self.cc.title, end="")
            print(" " * max(0, self.nL - len(self.cc.title)) + (" (" + str(q_time) + " seconds)" if q_time >= 0.5 else ""))


class UTextTestRunner(unittest.TextTestRunner):
    def __init__(self, *args, **kwargs):
        stream = io.StringIO()
        super().__init__(*args, stream=stream, **kwargs)

    def _makeResult(self):
        # stream = self.stream # not you!
        stream = sys.stdout
        stream = _WritelnDecorator(stream)
        return self.resultclass(stream, self.descriptions, self.verbosity)
\ No newline at end of file
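The new module keeps the runner/result pair self-contained. A rough wiring sketch is shown below; evaluate.py does something similar when it sets the UTextResult class attributes before running a question, while Question1 and the resultclass keyword argument are assumptions for illustration.

# Hypothetical driver for the runner/result pair defined in runners.py.
import unittest
from unitgrade.runners import UTextResult, UTextTestRunner

class Question1(unittest.TestCase):  # stands in for a UTestCase question class
    def test_something(self):
        """ Short title printed next to the progress bar """
        self.assertTrue(True)

# evaluate.py configures the result class through class attributes (the "Hacky" lines above):
UTextResult.number = 0              # question index used in the " * q1.1) ..." item titles
UTextResult.show_progress_bar = True
UTextResult.q_title_print = "Question 1: Example"

suite = unittest.TestLoader().loadTestsFromTestCase(Question1)
# resultclass is assumed to be passed like this; UTextTestRunner itself only swaps the stream.
UTextTestRunner(verbosity=2, resultclass=UTextResult).run(suite)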
src/unitgrade/version.py
-__version__ = "0.1.15"
+__version__ = "0.1.17"
\ No newline at end of file