-
-
Save pauloxnet/810cc8586bf6cf59c7a8bdd3c8256250 to your computer and use it in GitHub Desktop.
Report feature flag values per Django's database backends
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import ast | |
| import dataclasses | |
| import enum | |
| import json | |
| import pathlib | |
# Root of Django's database backend packages; the script assumes it is run
# from the top of a Django source checkout -- TODO confirm with the caller.
DB_BACKENDS_BASE_DIR = pathlib.Path("django/db/backends/")

# Module inside each backend package that declares its DatabaseFeatures class.
FEATURE_FILE_NAME = "features.py"
# Backward-compatible alias: the rest of the file still uses the original
# (misspelled) constant name.
FEAUTRE_FILE_NAME = FEATURE_FILE_NAME
@dataclasses.dataclass
class DatabaseFeatureFlags:
    """Extracts feature-flag values from one Django backend's features.py.

    The module is parsed statically with ``ast`` (never imported), so it
    works without a configured Django settings module.
    """

    name: str        # display label used as the Markdown column header
    path: str        # backend package directory under DB_BACKENDS_BASE_DIR
    class_name: str  # name of the DatabaseFeatures class to inspect

    @property
    def features_path(self):
        """Path to this backend's features.py source file."""
        return DB_BACKENDS_BASE_DIR / self.path / FEAUTRE_FILE_NAME

    def get_feature_flags(self):
        """Return ``{flag_name: value}`` for class-level assignments.

        Only plain single-target assignments of literal values are kept;
        dict-valued attributes are skipped, non-literal expressions become
        ``None``, and sets are converted to lists (JSON-friendly). Returns
        an empty dict when the target class is not found.
        """
        module = ast.parse(self.features_path.read_text())
        for top_level in module.body:
            if not (
                isinstance(top_level, ast.ClassDef)
                and top_level.name == self.class_name
            ):
                continue
            flags = {}
            for stmt in top_level.body:
                if not isinstance(stmt, ast.Assign) or len(stmt.targets) != 1:
                    continue
                target = stmt.targets[0]
                if not isinstance(target, ast.Name):
                    continue
                if isinstance(stmt.value, ast.Dict):
                    # Dict-valued attributes are not boolean-ish flags.
                    continue
                try:
                    literal = ast.literal_eval(stmt.value)
                except (ValueError, TypeError, SyntaxError):
                    literal = None  # value is not a static literal
                flags[target.id] = (
                    list(literal) if isinstance(literal, set) else literal
                )
            # Only the first class with a matching name is inspected.
            return flags
        return {}
# One extractor per backend. BASE holds the defaults every backend inherits;
# the emoji labels become Markdown column headers.
BASE = DatabaseFeatureFlags(
    name="Base", path="base", class_name="BaseDatabaseFeatures"
)
MYSQL = DatabaseFeatureFlags(
    name="MySQL 🐬", path="mysql", class_name="DatabaseFeatures"
)
ORACLE = DatabaseFeatureFlags(
    name="Oracle 🔮", path="oracle", class_name="DatabaseFeatures"
)
POSTGRES = DatabaseFeatureFlags(
    name="PostgreSQL 🐘", path="postgresql", class_name="DatabaseFeatures"
)
SQLITE = DatabaseFeatureFlags(
    name="SQLite 🪶", path="sqlite3", class_name="DatabaseFeatures"
)

# Column order of the generated matrix (BASE is the fallback, not a column).
DB_BACKENDS = [POSTGRES, MYSQL, SQLITE, ORACLE]
def report_features():
    """Build one row per base feature flag, ready for JSONL output.

    Each row maps ``"feature"`` to a human-readable flag name plus one
    ``True``/``False``/``None`` column per backend, falling back to the
    base class's value when a backend does not override the flag.
    """
    base_flags = BASE.get_feature_flags()
    flags_by_backend = {
        backend.path: backend.get_feature_flags() for backend in DB_BACKENDS
    }
    rows = []
    for flag in sorted(base_flags):
        label = flag.replace("_", " ").capitalize()
        # None stays None (unknown); any other value collapses to a bool.
        columns = {
            backend.path: (
                None
                if (
                    value := flags_by_backend.get(backend.path, {}).get(
                        flag, base_flags[flag]
                    )
                )
                is None
                else bool(value)
            )
            for backend in DB_BACKENDS
        }
        rows.append({"feature": label} | columns)
    return rows
# Display symbols for matrix cells, declared via the enum functional API:
# YES/NO for booleans, UNKNOWN for flags whose value could not be
# statically evaluated.
Symbol = enum.Enum(
    "Symbol",
    [("YES", "✅"), ("NO", "🚫"), ("UNKNOWN", "⚠️")],
)
def repr_feature(value):
    """Render one matrix cell value as its display string.

    ``True``/``False``/``None`` are compared by identity (exactly what the
    original ``match`` literal patterns do per PEP 634) and map to emoji
    symbols; any other value is rendered with ``str``.
    """
    if value is True:
        return Symbol.YES.value
    if value is False:
        return Symbol.NO.value
    if value is None:
        return Symbol.UNKNOWN.value
    return str(value)
def jsonl_to_markdown_table(jsonl_path: pathlib.Path, markdown_path: pathlib.Path):
    """Render a JSONL feature matrix as a Markdown table file.

    Raises ValueError when the JSONL file contains no non-blank lines.
    """
    rows = [
        json.loads(line)
        for line in jsonl_path.read_text(encoding="utf-8").splitlines()
        if line.strip()
    ]
    if not rows:
        raise ValueError("The JSONL is empty.")
    # "Feature" column first, then one column per backend, in matrix order.
    headers = {"feature": "Feature"}
    headers.update({db.path: db.name for db in DB_BACKENDS})
    header_row = "| **" + "** | **".join(headers.values()) + "** |"
    divider_row = (
        "| " + " | ".join("-" * len(title) for title in headers.values()) + " |"
    )
    body_rows = [
        "| " + " | ".join(repr_feature(row.get(key, "")) for key in headers) + " |"
        for row in rows
    ]
    markdown_path.write_text(
        "\n".join([header_row, divider_row, *body_rows]), encoding="utf-8"
    )
if __name__ == "__main__":
    # Emit the matrix twice: machine-readable JSONL, then a Markdown table.
    rows = report_features()
    jsonl_file = pathlib.Path("matrix.jsonl")
    jsonl_file.write_text(
        "".join(json.dumps(row, ensure_ascii=False) + "\n" for row in rows),
        encoding="utf-8",
    )
    jsonl_to_markdown_table(jsonl_file, pathlib.Path("matrix.md"))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment