Commit add87927 authored by Yifan Zhao

Implemented model P2

parent 40251fc8
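The new QoSModelP2 in the diff below predicts the QoS of a multi-knob configuration as the baseline QoS plus the sum of per-(op, knob) QoS deltas, each measured empirically with a single knob applied. A minimal standalone sketch of that additive scheme (hypothetical op/knob names and numbers, not the API in this diff):

# Sketch of the additive QoS idea behind model P2; names and numbers are hypothetical.
baseline_qos = 93.0                            # QoS of the unmodified app
delta_qos = {                                  # measured once per (op, knob) pair
    ("conv1", "fp16"): -0.3,
    ("conv2", "perforate"): -1.1,
}

def predict_qos(config):
    """config maps op name -> knob name, like KnobsT in the diff."""
    return baseline_qos + sum(delta_qos[(op, knob)] for op, knob in config.items())

print(predict_qos({"conv1": "fp16", "conv2": "perforate"}))  # 91.6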
@@ -6,8 +6,7 @@ from typing import Dict, Generic, List, Optional, Tuple, Type, TypeVar, Union
import matplotlib.pyplot as plt
import numpy as np
from opentuner.measurement.interface import MeasurementInterface
from opentuner.search.manipulator import (ConfigurationManipulator,
                                          EnumParameter)
from opentuner.search.manipulator import ConfigurationManipulator, EnumParameter
from ._logging import override_opentuner_config
from ._pareto import is_pareto_efficient
@@ -58,6 +57,15 @@ class ApproxApp(abc.ABC):
        the user should try to make it unique."""
        return ""

    @property
    def ops(self) -> List[str]:
        return list(self.op_knobs)

    @property
    def knobs(self) -> List[ApproxKnob]:
        knob_sets = [set(knobs) for knobs in self.op_knobs.values()]
        return list(set.union(*knob_sets))


class Config:
    def __init__(
...
import abc
import json
import logging
from typing import Callable, Dict, List, Optional, Tuple, Type, Union
import pickle
from pathlib import Path
from typing import Callable, Dict, Iterator, List, Optional, Tuple, Type, Union
import numpy as np
import pandas as pd
import torch
from .approxapp import ApproxApp, ApproxTuner, Config, KnobsT
from .approxapp import ApproxApp, ApproxKnob, ApproxTuner, Config, KnobsT
msg_logger = logging.getLogger(__name__)
PathLike = Union[Path, str]
class ModeledApp(ApproxApp, abc.ABC):
@@ -20,6 +26,9 @@ class ModeledApp(ApproxApp, abc.ABC):
    def __init__(self) -> None:
        super().__init__()
        models = self.get_models()
        self._name_to_model = {m.name: m for m in models}
        if len(self._name_to_model) != len(models):
            raise ValueError("Name conflict in models")
        self._perf_models = {
            model.name: model for model in models if isinstance(model, IPerfModel)
        }
@@ -83,10 +92,16 @@ class ModeledApp(ApproxApp, abc.ABC):
    def get_tuner(self) -> "ApproxModeledTuner":
        return ApproxModeledTuner(self)

    def _init_model(self, model_name: str):
        self._name_to_model[model_name]._init()


class IPerfModel(abc.ABC):
    """Abstract base class for models that provide performance prediction."""

    def __init__(self) -> None:
        self._inited = False

    @property
    @abc.abstractmethod
    def name(self) -> str:
@@ -95,13 +110,20 @@ class IPerfModel(abc.ABC):
    @abc.abstractmethod
    def measure_perf(self, with_approxes: KnobsT) -> float:
        """We implement this using a weighted linear performance model."""
        """Predict the performance of the application."""
        pass

    def _init(self):
        """Initialize the model before the first prediction task (profiling, etc.)"""
        self._inited = True
class IQoSModel(abc.ABC):
    """Abstract base class for models that provide QoS prediction."""

    def __init__(self) -> None:
        self._inited = False

    @property
    @abc.abstractmethod
    def name(self) -> str:
@@ -110,9 +132,13 @@ class IQoSModel(abc.ABC):
    @abc.abstractmethod
    def measure_qos(self, with_approxes: KnobsT) -> float:
        """We implement this using a weighted linear performance model."""
        """Predict the QoS of the application."""
        pass

    def _init(self):
        """Initialize the model before the first prediction task (profiling, etc.)"""
        self._inited = True
class LinearPerfModel(IPerfModel):
    """Weighted linear performance predictor based on cost of each operator."""

@@ -156,12 +182,20 @@ class QoSModelP1(IQoSModel):
    def __init__(
        self,
        app: ModeledApp,
        tensor_output_getter: Callable[[KnobsT], torch.Tensor],
        qos_metric: Callable[[torch.Tensor], float],
        storage: PathLike = None,
    ) -> None:
        super().__init__()
        self.app = app
        self.output_f = tensor_output_getter
        self.qos_metric = qos_metric
        self.storage = Path(storage) if storage else None
        self.delta_tensors = {
            op: {k.name: None for k in self.app.knobs} for op in self.app.ops
        }
        self.baseline_tensor = self.output_f({})

    @property
    def name(self) -> str:
@@ -169,15 +203,58 @@ class QoSModelP1(IQoSModel):
    def measure_qos(self, with_approxes: KnobsT) -> float:
        """Implementation of model."""
        return 0.0
        assert self.baseline_tensor is not None
        delta_tensors = np.array(
            [self.delta_tensors[op][knob] for op, knob in with_approxes.items()]
        )
        ret = delta_tensors.sum() + self.baseline_tensor
        return self.qos_metric(ret)

    def _init(self):
        dt = self.delta_tensors
        btensor = self.baseline_tensor
        if self.storage and self.storage.is_file():
            for op, knob, delta_tensor in self._load(self.storage):
                dt[op][knob] = delta_tensor
        for op, knob in barred_ravel_knobs(self.app):
            if dt[op][knob] is not None:
                continue
            delta_tensor = self.output_f({op: knob}) - btensor
            dt[op][knob] = delta_tensor
            self._try_append_save(self.storage, op, knob, delta_tensor)
        super()._init()

    @staticmethod
    def _load(path: Path) -> Iterator[Tuple[str, str, torch.Tensor]]:
        msg_logger.info(f"Found pickle at {path}")
        with path.open("rb") as f:
            while True:
                try:
                    op_name, knob_name, tensor = pickle.load(f)
                    yield op_name, knob_name, tensor
                except EOFError:
                    return

    @staticmethod
    def _try_append_save(
        path: Optional[Path], op_name: str, knob_name: str, tensor: torch.Tensor
    ):
        if not path:
            return
        path.touch(exist_ok=True)
        with path.open("ab") as f:
            pickle.dump((op_name, knob_name, tensor), f)
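QoSModelP1._try_append_save and _load above rely on the fact that repeated pickle.dump calls into one file can be read back by calling pickle.load in a loop until EOFError. A small self-contained sketch of that pattern (temporary file name and toy records are hypothetical, not the class API):

import pickle
from pathlib import Path

path = Path("deltas.pkl")                       # hypothetical storage file
records = [("conv1", "fp16", 0.1), ("conv2", "perforate", 0.2)]

with path.open("ab") as f:                      # append-only, as in _try_append_save
    for rec in records:
        pickle.dump(rec, f)

loaded = []
with path.open("rb") as f:                      # read back until EOFError, as in _load
    while True:
        try:
            loaded.append(pickle.load(f))
        except EOFError:
            break
assert loaded[-len(records):] == records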
class QoSModelP2(IQoSModel):
    """QoS model `P2` in ApproxTuner."""

    def __init__(self, app: ModeledApp) -> None:
    def __init__(self, app: ModeledApp, storage: PathLike = None) -> None:
        super().__init__()
        self.app = app
        self.storage = Path(storage) if storage else None
        self.qos_df = None
        self.baseline_qos = None

    @property
    def name(self) -> str:
@@ -194,8 +271,56 @@ class QoSModelP2(IQoSModel):
        return qos

    def measure_qos(self, with_approxes: KnobsT) -> float:
        """Implementation of model."""
        return 0.0
        assert self.baseline_qos is not None and self.qos_df is not None
        delta_qoses = np.array(
            [self.qos_df.loc[kv] for kv in with_approxes.items()]
        ) - self.baseline_qos
        ret = delta_qoses.sum() + self.baseline_qos
        assert not np.isnan(ret)
        return ret

    def _init(self):
        if self.storage and self.storage.is_file():
            self.qos_df, self.baseline_qos = self._load(self.storage)
        else:
            knob_names = [k.name for k in self.app.knobs]
            self.qos_df = pd.DataFrame(index=self.app.ops, columns=knob_names)
            self.baseline_qos = self._empirical_measure_qos({})
        df = self.qos_df
        for op, knob in barred_ravel_knobs(self.app):
            if not np.isnan(df.loc[op, knob]):
                continue
            df.loc[op, knob] = self._empirical_measure_qos({op: knob})
        if self.storage and not self.storage.is_file():
            self._save(self.storage)
        super()._init()

    def _load(self, path: Path) -> Tuple[pd.DataFrame, float]:
        with path.open() as f:
            data = json.load(f)
        df = pd.DataFrame(data["df"])
        baseline_qos = float(data["bqos"])
        if "app_name" in data:
            name = data["app_name"]
            if self.app.name != name:
                msg_logger.error(
                    f'Profile at {path} belongs to app "{name}" '
                    f"while our app is {self.app.name}"
                )
        else:
            msg_logger.warning("Loaded profile does not have app name identifier")
        return df, baseline_qos

    def _save(self, path: Path):
        with path.open("w") as f:
            json.dump(
                {
                    "app_name": self.app.name,
                    "df": self.qos_df.to_dict(),
                    "bqos": self.baseline_qos,
                },
                f,
            )
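The _save/_load pair above round-trips the QoS table through DataFrame.to_dict() and pd.DataFrame(...). A quick sketch of that round trip with a toy table (hypothetical app, op, and knob names; NaN marks pairs not yet profiled):

import json
import pandas as pd

df = pd.DataFrame(index=["conv1", "conv2"], columns=["fp16", "perforate"])
df.loc["conv1", "fp16"] = 92.7                 # one profiled (op, knob) pair

text = json.dumps({"app_name": "toy_app", "df": df.to_dict(), "bqos": 93.0})
data = json.loads(text)                        # what _load reads back
df2 = pd.DataFrame(data["df"])

assert float(df2.loc["conv1", "fp16"]) == 92.7
assert float(data["bqos"]) == 93.0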
class ValConfig(Config):

@@ -212,6 +337,8 @@ class ValConfig(Config):
class ApproxModeledTuner(ApproxTuner):
    app: ModeledApp

    def tune(
        self,
        max_iter: int,
@@ -224,6 +351,10 @@ class ApproxModeledTuner(ApproxTuner):
        perf_model: str = "none",
        qos_model: str = "none",
    ) -> List[ValConfig]:
        if qos_model != "none":
            self.app._init_model(qos_model)
        if perf_model != "none":
            self.app._init_model(perf_model)
        ret = super().tune(
            max_iter=max_iter,
            qos_tuner_threshold=qos_tuner_threshold,
@@ -256,3 +387,18 @@ class ApproxModeledTuner(ApproxTuner):
    @classmethod
    def _get_config_class(cls) -> Type[Config]:
        return ValConfig


def barred_ravel_knobs(app: ApproxApp) -> Iterator[Tuple[str, str]]:
    """Flattens op_knobs of app into (op name, knob name) pairs while showing two
    levels of progress bars."""
    from tqdm import tqdm

    bar1 = tqdm(app.op_knobs.items(), leave=None)
    for op_name, knobs in bar1:
        bar1.set_postfix(op=op_name)
        bar2 = tqdm(knobs, leave=None)
        for knob in bar2:
            bar2.set_postfix(knob=knob.name)
            yield op_name, knob.name
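Because both P1 and P2 only ever apply one knob at a time while iterating barred_ravel_knobs, the profiling cost grows linearly with the number of (op, knob) pairs rather than with the size of the full configuration space. A rough count under assumed app dimensions (numbers are hypothetical):

n_ops, n_knobs_per_op = 30, 8                   # assumed app size
linear_profiling = 1 + n_ops * n_knobs_per_op   # one baseline run + one run per (op, knob) pair
full_space = (n_knobs_per_op + 1) ** n_ops      # every op picks a knob or stays at baseline
print(linear_profiling)                         # 241
print(full_space)                               # about 4.2e28 configurations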
@@ -108,13 +108,14 @@ class TorchApp(ModeledApp, abc.ABC):
            qoses = []
            for _, target in self.val_loader:
                end = begin + len(target)
                target = move_to_device_recursively(target, self.device)
                qos = self.tensor_to_qos(tensor_output[begin:end], target)
                qoses.append(qos)
            return self.combine_qos(np.array(qoses))

        return [
            LinearPerfModel(self._op_costs, self._knob_speedups),
            QoSModelP1(self._get_raw_output_valset, batched_valset_qos),
            QoSModelP1(self, self._get_raw_output_valset, batched_valset_qos),
            QoSModelP2(self),
        ]
@@ -154,7 +155,7 @@ class TorchApp(ModeledApp, abc.ABC):
            inputs = move_to_device_recursively(inputs, self.device)
            outputs = approxed(inputs)
            all_outputs.append(outputs)
        return torch.stack(all_outputs)
        return torch.cat(all_outputs, dim=0)

    @staticmethod
    def _check_baseline_knob(knobs: Set[TorchApproxKnob]) -> Set[TorchApproxKnob]:
...
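The last hunk switches _get_raw_output_valset from torch.stack to torch.cat, presumably because the slicing tensor_output[begin:end] in batched_valset_qos expects one flat batch axis: stack inserts a new leading batch-of-batches dimension and requires all batches to be the same size, while cat concatenates along the existing batch dimension. A tiny illustration (toy shapes, not the app's tensors):

import torch

b1 = torch.zeros(32, 10)                   # a full validation batch of outputs
b2 = torch.zeros(17, 10)                   # a smaller final batch

print(torch.cat([b1, b2], dim=0).shape)    # torch.Size([49, 10]) -- one flat batch axis
print(torch.stack([b1, b1]).shape)         # torch.Size([2, 32, 10]) -- extra leading axis
# torch.stack([b1, b2]) would raise a RuntimeError because the batch sizes differ.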