Getting straight to the point here, check out JSON file number one (formatted like a legitimate EIP-712 typed-data payload, i.e. the structure that gets hashed and signed):
{
  "types": {
    "EIP712Domain": [
      {
        "name": "name",
        "type": "string"
      },
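For context on what a file like that is used for: under EIP-712 the typed-data payload gets encoded into a 32-byte digest, and that digest is what a wallet actually signs. Here is a minimal sketch in Python using the eth-account package; the domain values, the Message type, and the private key are made up for illustration (newer eth-account releases rename the helper to encode_typed_data):

# Minimal sketch: computing the EIP-712 digest for a typed-data payload.
# All values below are illustrative, not taken from the file above.
from eth_account import Account
from eth_account.messages import encode_structured_data

typed_data = {
    "types": {
        "EIP712Domain": [
            {"name": "name", "type": "string"},
            {"name": "version", "type": "string"},
            {"name": "chainId", "type": "uint256"},
            {"name": "verifyingContract", "type": "address"},
        ],
        "Message": [{"name": "contents", "type": "string"}],
    },
    "primaryType": "Message",
    "domain": {
        "name": "Example",
        "version": "1",
        "chainId": 1,
        "verifyingContract": "0x0000000000000000000000000000000000000001",
    },
    "message": {"contents": "hello"},
}

signable = encode_structured_data(typed_data)   # builds the EIP-712 signable message
signed = Account.from_key("0x" + "11" * 32).sign_message(signable)
print(signed.messageHash.hex())                 # the digest that actually gets signed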
// Decompiled by library.dedaub.com
// 2025.04.18 18:07 UTC
object "contract" {
  code { }
  object "runtime" {
    code {
      function func_0x311e(_0, _1, _2) -> ret_val_0, ret_val_1 {
        let _3 := mload(0x40)  // load the free memory pointer (Solidity keeps it at 0x40)
        mstore(_3, _0)         // write the first argument into free memory
// SPDX-License-Identifier: LGPL-3.0-only
pragma solidity 0.7.6;
import "./GnosisSafe.sol";

contract GnosisSafeTest is GnosisSafe {
    constructor() {
        // Initialize the Gnosis Safe with some owners and a threshold
        address[] memory owners = new address[](3);
        owners[0] = address(0x1);
// SPDX-License-Identifier: LGPL-3.0-only
pragma solidity 0.7.6;

// src/common/Enum.sol
/// @title Enum - Collection of enums
/// @author Richard Meissner - <[email protected]>
contract Enum {
    enum Operation {
        Call,
import { expect } from "chai";
import hre, { deployments, waffle } from "hardhat";
import "@nomiclabs/hardhat-ethers";
import { getSafeWithOwners } from "../utils/setup";
import { executeContractCallWithSigners, calculateSafeMessageHash } from "../../src/utils/execution";
import { chainId } from "../utils/encoding";

describe("SignMessageLib", async () => {
    const [user1, user2] = waffle.provider.getWallets();
#!/usr/bin/env python3
import json
import openai
import time
import os
import logging
from openai.error import InvalidRequestError, RateLimitError
from concurrent.futures import ThreadPoolExecutor
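Those imports suggest the usual pattern: fan prompts out with a thread pool and back off when the API rate-limits. A hedged sketch of how they typically fit together (the model name, prompts, and backoff policy are assumptions, not the original script, and openai.api_key still has to be set from the environment):

# Hedged sketch: parallel ChatCompletion calls with retry on rate limits,
# using the pre-1.0 openai client that the imports above correspond to.
def ask(prompt, retries=5):
    for attempt in range(retries):
        try:
            resp = openai.ChatCompletion.create(
                model="gpt-3.5-turbo",  # assumed model
                messages=[{"role": "user", "content": prompt}],
            )
            return resp["choices"][0]["message"]["content"]
        except RateLimitError:
            time.sleep(2 ** attempt)  # exponential backoff, then retry
        except InvalidRequestError as err:
            logging.error("Skipping prompt: %s", err)
            return None
    return None

with ThreadPoolExecutor(max_workers=4) as pool:
    answers = list(pool.map(ask, ["prompt one", "prompt two"]))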
import argparse
import os
import torch
from accelerate import Accelerator
from datasets import load_dataset
from peft import LoraConfig, get_peft_model, prepare_model_for_int8_training, set_peft_model_state_dict
from torch.utils.data import IterableDataset
from tqdm import tqdm
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer, Trainer, TrainingArguments, logging, set_seed
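That import list is the standard PEFT/LoRA fine-tuning stack. As a rough sketch of how those pieces compose (the base model, rank, and other hyperparameters below are placeholders rather than the script's real configuration, and the snippet relies on the imports above):

# Rough sketch: attaching LoRA adapters to a causal LM with peft.
# "gpt2" is a stand-in base model; the real script's model and settings are not shown above.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

lora_config = LoraConfig(
    r=16,                 # adapter rank
    lora_alpha=32,
    lora_dropout=0.05,
    bias="none",
    task_type="CAUSAL_LM",
)
model = get_peft_model(model, lora_config)   # wraps the base model, freezing its weights
model.print_trainable_parameters()           # only the LoRA matrices remain trainable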
import argparse
import csv
import os

# Function to calculate total rows in CSV
def get_total_rows(csv_file):
    with open(csv_file, 'r') as f:
        return sum(1 for row in csv.reader(f)) - 1  # Exclude header

# Function to split CSV files
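The body of that split function is not shown above. A minimal sketch of what a row-count-based splitter could look like, reusing the csv and os imports from the same file (the chunk_size parameter and the output naming scheme are assumptions, not the original code):

# Hypothetical splitter sketch; not the original implementation.
def split_csv(csv_file, chunk_size, output_dir):
    os.makedirs(output_dir, exist_ok=True)
    base = os.path.splitext(os.path.basename(csv_file))[0]
    with open(csv_file, 'r', newline='') as f:
        reader = csv.reader(f)
        header = next(reader)            # keep the header for every chunk
        chunk, part = [], 0
        for row in reader:
            chunk.append(row)
            if len(chunk) == chunk_size:
                write_chunk(header, chunk, output_dir, base, part)
                chunk, part = [], part + 1
        if chunk:                        # flush the final, possibly short, chunk
            write_chunk(header, chunk, output_dir, base, part)

def write_chunk(header, rows, output_dir, base, part):
    out_path = os.path.join(output_dir, f"{base}_part{part}.csv")
    with open(out_path, 'w', newline='') as out:
        writer = csv.writer(out)
        writer.writerow(header)
        writer.writerows(rows)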
# Importing necessary libraries for data preprocessing and visualization
import matplotlib.pyplot as plt
import numpy as np
from tqdm.notebook import trange
import pandas as pd
import random
import torch
import re
from datasets import load_dataset
from simplet5 import SimpleT5
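simplet5 wraps T5 fine-tuning behind a very small API. A hedged sketch of the usual call sequence (the DataFrame contents and hyperparameters are placeholders; simplet5 expects a pandas DataFrame with source_text and target_text columns):

# Hedged sketch of the typical simplet5 training flow; the data and settings
# below are placeholders, not values from the notebook above.
train_df = pd.DataFrame({
    "source_text": ["summarize: example input"],
    "target_text": ["example output"],
})
eval_df = train_df.copy()

model = SimpleT5()
model.from_pretrained(model_type="t5", model_name="t5-base")
model.train(
    train_df=train_df,
    eval_df=eval_df,
    source_max_token_len=128,
    target_max_token_len=64,
    batch_size=8,
    max_epochs=1,
    use_gpu=torch.cuda.is_available(),
)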
import os
import logging
import csv
import argparse
from tqdm import tqdm
import sys
from dotenv import load_dotenv
import openai

logging.basicConfig(level=logging.INFO,