Begin afresh

This commit is contained in:
andytudhope 2021-01-12 16:53:43 +02:00
parent 1a35a4a018
commit c061afe386
207 changed files with 37827 additions and 0 deletions

9
.editorconfig Normal file
View File

@ -0,0 +1,9 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
charset = utf-8
indent_size = 2
indent_style = space
trim_trailing_whitespace = true

6
.eslintignore Normal file
View File

@ -0,0 +1,6 @@
node_modules
dist
build
internals
static
.eslintrc.js

57
.eslintrc.js Normal file
View File

@ -0,0 +1,57 @@
const fs = require('fs')
const path = require('path')
const prettierOptions = JSON.parse(
fs.readFileSync(path.resolve(__dirname, '.prettierrc'), 'utf8')
)
module.exports = {
parser: '@typescript-eslint/parser',
plugins: [
'@typescript-eslint',
'react',
'react-hooks',
'eslint-plugin-import',
'prettier',
],
env: {
browser: true,
},
extends: [
'plugin:@typescript-eslint/recommended',
'plugin:react/recommended',
'plugin:prettier/recommended',
],
parserOptions: {
project: ['tsconfig.json'],
ecmaVersion: 2020,
sourceType: 'module',
ecmaFeatures: {
jsx: true,
},
},
rules: {
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/no-unused-vars': 'off',
'react/jsx-filename-extension': [
'warn',
{
extensions: ['.jsx', '.tsx'],
},
],
'react/prop-types': 'off',
'react-hooks/rules-of-hooks': 'error',
'react-hooks/exhaustive-deps': 'warn',
'@typescript-eslint/no-empty-interface': [
'warn',
{
allowSingleExtends: false,
},
],
},
settings: {
react: {
version: 'detect',
},
},
}

1
.gitattributes vendored Normal file
View File

@ -0,0 +1 @@
*.sol diff linguist-language=Solidity

206
.gitignore vendored Normal file
View File

@ -0,0 +1,206 @@
# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig
# Created by https://www.gitignore.io/api/visualstudiocode,windows,linux,macos,node
# Edit at https://www.gitignore.io/?templates=visualstudiocode,windows,linux,macos,node
### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# next.js build output
.next
# nuxt.js build output
.nuxt
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
.idea
.project
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
### Windows ###
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# End of https://www.gitignore.io/api/visualstudiocode,windows,linux,macos,node
# Slither
crytic-export/
# AWS EB CLI
.elasticbeanstalk/
# Custom rules (everything added below won't be overridden by 'Generate .gitignore File' if you use 'Update' option)
contractAbi
.embark
chains.json
config/development/mnemonic
config/livenet/
coverage
node_modules
package-lock.json
dist
full-build
flattenedContracts
WebApp/app/embarkArtifacts/
shared.development.chains.json
# production
/build
app.zip
# misc
.DS_Store
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*
WebApp/app/embarkArtifacts

2
.npmrc Normal file
View File

@ -0,0 +1,2 @@
# Force npm to run node-gyp also as root, preventing permission denied errors in AWS with npm@5
unsafe-perm=true

47
.prettierignore Normal file
View File

@ -0,0 +1,47 @@
full-build/
dist/
Backend/
Build/
WebApp/build/
.embark/
config/
node_modules/
internals/generators/
internals/scripts/
package-lock.json
yarn.lock
package.json
readme.md
*.svg
.stylelintrc
.prettierignore
.nvmrc
.gitignore
.gitattributes
.eslintignore
.env.example
.env
.editorconfig
*.png
*.woff
*.woff2
*.zip
*.pem
.htaccess
.npmrc
.soliumignore
LICENSE
Makefile
*.bak
*.xml
*.ico
*.sol
.gitkeep
*.log
*.jpg
*.jpeg
*.gz
*.bin
*.png
*.hbs

8
.prettierrc Normal file
View File

@ -0,0 +1,8 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "es5"
}

1
.soliumignore Normal file
View File

@ -0,0 +1 @@
node_modules

11
.soliumrc.json Normal file
View File

@ -0,0 +1,11 @@
{
"extends": "solium:all",
"plugins": ["security"],
"rules": {
"security/no-inline-assembly": "off",
"security/no-assign-params": "off",
"quotes": ["error", "double"],
"indentation": ["error", 4],
"arg-overflow": ["warning", 3]
}
}

View File

@ -0,0 +1,541 @@
[
{
"constant": false,
"inputs": [
{
"name": "_id",
"type": "bytes32"
},
{
"name": "_amount",
"type": "uint256"
}
],
"name": "withdraw",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function",
"signature": "0x040cf020"
},
{
"constant": true,
"inputs": [
{
"name": "_id",
"type": "bytes32"
},
{
"name": "_amount",
"type": "uint256"
}
],
"name": "upvoteEffect",
"outputs": [
{
"name": "effect",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x1248edd1"
},
{
"constant": true,
"inputs": [],
"name": "safeMax",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x199e1698"
},
{
"constant": false,
"inputs": [
{
"name": "_id",
"type": "bytes32"
},
{
"name": "_amount",
"type": "uint256"
}
],
"name": "upvote",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function",
"signature": "0x2b3df690"
},
{
"constant": true,
"inputs": [],
"name": "total",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x2ddbd13a"
},
{
"constant": true,
"inputs": [],
"name": "decimals",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x313ce567"
},
{
"constant": false,
"inputs": [
{
"name": "_newController",
"type": "address"
}
],
"name": "changeController",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function",
"signature": "0x3cebb823"
},
{
"constant": true,
"inputs": [
{
"name": "",
"type": "bytes32"
}
],
"name": "existingIDs",
"outputs": [
{
"name": "",
"type": "bool"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x57073d4b"
},
{
"constant": true,
"inputs": [],
"name": "getDAppsCount",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x5ecaa4ff"
},
{
"constant": true,
"inputs": [],
"name": "max",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x6ac5db19"
},
{
"constant": true,
"inputs": [
{
"name": "_id",
"type": "bytes32"
}
],
"name": "downvoteCost",
"outputs": [
{
"name": "b",
"type": "uint256"
},
{
"name": "vR",
"type": "uint256"
},
{
"name": "c",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x6fe39f64"
},
{
"constant": true,
"inputs": [],
"name": "ceiling",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x753ed1bd"
},
{
"constant": false,
"inputs": [
{
"name": "_id",
"type": "bytes32"
},
{
"name": "_amount",
"type": "uint256"
},
{
"name": "_metadata",
"type": "bytes32"
}
],
"name": "createDApp",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function",
"signature": "0x7e38d973"
},
{
"constant": false,
"inputs": [
{
"name": "_newCeiling",
"type": "uint256"
}
],
"name": "setCeiling",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function",
"signature": "0x8f02cf97"
},
{
"constant": false,
"inputs": [
{
"name": "_from",
"type": "address"
},
{
"name": "_amount",
"type": "uint256"
},
{
"name": "_token",
"type": "address"
},
{
"name": "_data",
"type": "bytes"
}
],
"name": "receiveApproval",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function",
"signature": "0x8f4ffcb1"
},
{
"constant": true,
"inputs": [
{
"name": "",
"type": "uint256"
}
],
"name": "dapps",
"outputs": [
{
"name": "developer",
"type": "address"
},
{
"name": "id",
"type": "bytes32"
},
{
"name": "metadata",
"type": "bytes32"
},
{
"name": "balance",
"type": "uint256"
},
{
"name": "rate",
"type": "uint256"
},
{
"name": "available",
"type": "uint256"
},
{
"name": "votesMinted",
"type": "uint256"
},
{
"name": "votesCast",
"type": "uint256"
},
{
"name": "effectiveBalance",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0x9640fe35"
},
{
"constant": true,
"inputs": [
{
"name": "",
"type": "bytes32"
}
],
"name": "id2index",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0xac56f70f"
},
{
"constant": false,
"inputs": [
{
"name": "_id",
"type": "bytes32"
},
{
"name": "_amount",
"type": "uint256"
}
],
"name": "downvote",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function",
"signature": "0xac769090"
},
{
"constant": true,
"inputs": [
{
"name": "_id",
"type": "bytes32"
}
],
"name": "withdrawMax",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0xcb2b6d26"
},
{
"constant": false,
"inputs": [
{
"name": "_id",
"type": "bytes32"
},
{
"name": "_metadata",
"type": "bytes32"
}
],
"name": "setMetadata",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function",
"signature": "0xd3525adf"
},
{
"constant": true,
"inputs": [],
"name": "controller",
"outputs": [
{
"name": "",
"type": "address"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"signature": "0xf77c4791"
},
{
"inputs": [
{
"name": "_SNT",
"type": "address"
}
],
"payable": false,
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"name": "id",
"type": "bytes32"
},
{
"indexed": false,
"name": "newEffectiveBalance",
"type": "uint256"
}
],
"name": "DAppCreated",
"type": "event",
"signature": "0x868bbbb32f410f6626146ffc2d5c58cfdb3f08798ea4965f55c2b7d30a6f9c6c"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"name": "id",
"type": "bytes32"
},
{
"indexed": false,
"name": "newEffectiveBalance",
"type": "uint256"
}
],
"name": "Upvote",
"type": "event",
"signature": "0x967f7d6ea4a44117f4cb822f761b5c76cbeac4c6ab5cfbaa59447574fa126bc2"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"name": "id",
"type": "bytes32"
},
{
"indexed": false,
"name": "newEffectiveBalance",
"type": "uint256"
}
],
"name": "Downvote",
"type": "event",
"signature": "0xa84d42fd75bca8f5ea3b11dc2aee00753b6ced6a669d6ad32396e1fef5b13528"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"name": "id",
"type": "bytes32"
},
{
"indexed": false,
"name": "newEffectiveBalance",
"type": "uint256"
}
],
"name": "Withdraw",
"type": "event",
"signature": "0x4591ca0897d0d8e83f7153dfe0b2912125672084ab8d84be59ee13240a1778bc"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"name": "id",
"type": "bytes32"
}
],
"name": "MetadataUpdated",
"type": "event",
"signature": "0x6e27af24ead46b4b469e383b46b4b75487fcf1ffce54d216add332f9de2120c5"
},
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"name": "oldCeiling",
"type": "uint256"
},
{
"indexed": false,
"name": "newCeiling",
"type": "uint256"
}
],
"name": "CeilingUpdated",
"type": "event",
"signature": "0x90aac84a509ee3cec594d7cc6a04f31c498c4d91783e5cdbcf454ab8aaaae321"
}
]

View File

@ -0,0 +1,5 @@
const web3 = require('./web3')
const config = require('./../config')
const DiscoverABI = require('./discover-abi.json')
module.exports = new web3.eth.Contract(DiscoverABI, config.DISCOVER_CONTRACT)
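The exported instance supports read-only calls against everything the ABI above exposes. A minimal sketch for listing all ranked DApps, assuming the default contract address and Ropsten endpoint from Backend/config/index.js (the require path is assumed relative to this module):
const DiscoverContract = require('./discover-contract')
// Read every DApp struct from the Discover contract (view calls only, no gas)
async function listDApps() {
  const count = await DiscoverContract.methods.getDAppsCount().call()
  const dapps = []
  for (let i = 0; i < Number(count); i++) {
    // dapps(uint256) returns the full Data struct: developer, id, metadata, balance, rate, ...
    dapps.push(await DiscoverContract.methods.dapps(i).call())
  }
  return dapps
}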

View File

@ -0,0 +1,6 @@
const Web3 = require('web3');
const config = require('../config')
module.exports = new Web3(
new Web3.providers.WebsocketProvider(config.BLOCKCHAIN_CONNECTION_POINT)
);

View File

@ -0,0 +1,15 @@
let mongoose = require('mongoose');
const config = require('./');
class DBConfig {
static config() {
if (config.DB_CONNECTION == undefined) {
throw Error('Unable to find MongoDB URI in DB_CONNECTION env variable!')
}
mongoose.Promise = global.Promise;
mongoose.connect(config.DB_CONNECTION, { useNewUrlParser: true });
}
}
module.exports = DBConfig;

40
Backend/config/index.js Normal file
View File

@ -0,0 +1,40 @@
/* This file is a central place for managing backend settings */
/* shorthand */
const env = process.env
/* some defaults cannot be known in advance */
const config = {
/* Hosting */
PORT : env.PORT || 4000,
RATE_LIMIT_TIME : env.RATE_LIMIT_TIME || 15,
RATE_LIMIT_MAX_REQ : env.RATE_LIMIT_MAX_REQ || 1,
/* Misc */
ENVIRONMENT : env.ENVIRONMENT || "DEV",
/* Database */
DB_CONNECTION : env.DB_CONNECTION || null,
/* Access */
ADMIN_USER : env.ADMIN_USER || "admin",
ADMIN_PASSWORD : env.ADMIN_PASSWORD || "discoverbancor",
/* IPFS */
IPFS_HOST : env.IPFS_HOST || "ipfs.status.im",
IPFS_PORT : env.IPFS_PORT || "443",
IPFS_PROTOCOL : env.IPFS_PROTOCOL || "https",
/* Blockchain */
DISCOVER_CONTRACT : env.DISCOVER_CONTRACT || "0x426D564088681DF29242f8178e60E52D1F4cf8A3",
BLOCKCHAIN_CONNECTION_POINT : env.BLOCKCHAIN_CONNECTION_POINT || "wss://ropsten.infura.io/ws/v3/8675214b97b44e96b70d05326c61fd6a",
/* EMail */
EMAIL_USER : env.EMAIL_USER || null,
EMAIL_PASSWORD : env.EMAIL_PASSWORD || null,
EMAIL_HOST : env.EMAIL_HOST || null,
EMAIL_PORT : env.EMAIL_PORT || null,
EMAIL_TLS : env.EMAIL_TLS || null,
APPROVER_MAIL : env.APPROVER_MAIL || "dapps-approvals@status.im",
APPROVE_NOTIFIER_MAIL : env.APPROVE_NOTIFIER_MAIL || "dapps-approvals@status.im",
/* Logging */
CLOUDWATCH_ACCESS_KEY_ID : env.CLOUDWATCH_ACCESS_KEY_ID || null,
CLOUDWATCH_SECRET_ACCESS_KEY : env.CLOUDWATCH_SECRET_ACCESS_KEY || null,
CLOUDWATCH_REGION : env.CLOUDWATCH_REGION || null,
}
module.exports = config;

View File

@ -0,0 +1,18 @@
{
"EXCHANGES": "EXCHANGES",
"MARKETPLACES": "MARKETPLACES",
"COLLECTIBLES": "COLLECTIBLES",
"GAMES": "GAMES",
"SOCIAL_NETWORKS": "SOCIAL_NETWORKS",
"UTILITIES": "UTILITIES",
"OTHER": "OTHER",
"ALL_CATEGORIES": [
"EXCHANGES",
"MARKETPLACES",
"COLLECTIBLES",
"GAMES",
"SOCIAL_NETWORKS",
"UTILITIES",
"OTHER"
]
}

View File

@ -0,0 +1,10 @@
{
"NEW": "NEW",
"APPROVED": "APPROVED",
"UPDATED": "UPDATED",
"ALL_STATUS": [
"NEW",
"APPROVED",
"UPDATED"
]
}

View File

@ -0,0 +1,179 @@
const DAppMetadata = require('./../models/dapps-metadata-model')
const TemplateParser = require('./../inputs/template-parser')
const DAppsMetadataInputTemplates = require('./../inputs/templates/dapps-metadata')
const IPFSService = require('./../services/ipfs-service')
const DiscoverService = require('./../services/discover-service')
const DAppImageService = require('./../services/dapp-image-service')
const DAppMetadataService = require('./../services/dapp-metadata-service')
const ApprovalEmail = require('./../emails/approval-email')
const BadRequestError = require('./../errors/bad-request-error')
const DAPP_METADATA_STATUSES = require('./../constants/dapp-metadata-statuses')
const web3 = require('./../blockchain/web3')
const logger = require('./../logger/logger').getLoggerFor(
'DApps-Metadata-Controller',
)
class DAppsMetadataController {
static async uploadDAppMetadata(req, res) {
try {
const parsedInput = TemplateParser.parse(
req.body,
DAppsMetadataInputTemplates.UploadingTemplate,
)
const uploadedMetadata = await DAppMetadataService.upload(
req,
parsedInput,
)
logger.info(
`A dapp metadata with hash [${uploadedMetadata.hash}] has been uploaded successfully`,
)
res.status(200).json({ hash: uploadedMetadata.hash })
} catch (error) {
logger.error(error.message)
throw new BadRequestError(error)
}
}
static async sendApprovalEmail(req, res) {
const dappMetadata = await DAppMetadata.findOne({ hash: req.params.hash })
if (!dappMetadata) {
return void res.status(404).send()
}
if (dappMetadata.status == DAPP_METADATA_STATUSES.NEW) {
const approvalEmail = new ApprovalEmail(dappMetadata)
approvalEmail.send()
}
res.status(200).send()
}
static async setMetadataStatus(req, res) {
waitToBeMined(req.body.txHash, async () => {
const dapp = await DiscoverService.retrieveDApp(req.params.dappId)
const dappMetadata = await DAppMetadata.findByBytes32Hash(dapp.metadata)
const initialDAppMetadata = await DAppMetadata.findOne({
compressedMetadata: req.params.dappId,
})
if (
dappMetadata &&
initialDAppMetadata &&
initialDAppMetadata.status != DAPP_METADATA_STATUSES.NEW
) {
dappMetadata.status = DAPP_METADATA_STATUSES.APPROVED
dappMetadata.compressedMetadata = initialDAppMetadata.compressedMetadata
await dappMetadata.save()
initialDAppMetadata.status = DAPP_METADATA_STATUSES.UPDATED
await initialDAppMetadata.save()
}
})
res.status(200).send()
}
static async getDAppMetadata(req, res) {
try {
const dappMetadata = await DAppMetadata.findOne({ hash: req.params.hash })
if (dappMetadata) {
return void res
.status(200)
.jsonCutSensitives(dappMetadata, ['_id', '__v'])
}
res.status(404).send()
} catch (error) {
logger.error(error.message)
res.status(404).send()
}
}
static async getDAppImage(req, res) {
try {
const dappImage = await DAppImageService.retrieveImage(req.params.hash)
if (!dappImage) {
return void res.status(404).send()
}
const imageBuffer = Buffer.from(dappImage.content, 'base64')
/* allow for caching of images, since they are the bulk of requests */
res.set('Cache-Control', 'public, max-age=31557600')
res.set('Content-Type', 'image/png')
res.set('Content-Length', imageBuffer.length)
res.status(200)
return void res.end(imageBuffer)
} catch (error) {
logger.error(error.message)
res.status(404).send()
}
}
static async getAllDappsMetadata(req, res) {
const dappsMetadata = await DAppMetadata.find()
const dappsFormatedMetadata = {}
for (let i = 0; i < dappsMetadata.length; i++) {
const metadataHash = dappsMetadata[i].hash
dappsFormatedMetadata[metadataHash] = dappsMetadata[i]
}
/* don't cache for longer than 60 seconds to show new dapps quicker */
res.set('Cache-Control', 'public, max-age=60')
res.status(200).json(dappsFormatedMetadata)
}
static async approveDApp(req, res) {
const dappMetadata = await DAppMetadata.findOne({ hash: req.params.hash })
if (dappMetadata) {
dappMetadata.status = DAPP_METADATA_STATUSES.APPROVED
const json = JSON.stringify(dappMetadata.details)
dappMetadata.ipfsHash = await IPFSService.addContent(json)
await dappMetadata.save()
logger.info(`A dapp with hash [${dappMetadata.hash}] has been approved`)
return void res.status(200).send()
}
res.status(404).send()
}
static async rejectDApp(req, res) {
const dappMetadata = await DAppMetadata.findOne({ hash: req.params.hash })
if (dappMetadata) {
await dappMetadata.remove()
return void res.status(200).send()
}
res.status(404).send()
}
}
const waitToBeMined = async function (txHash, callback) {
const updateMetadataTx = await web3.eth.getTransaction(txHash)
if (!updateMetadataTx.blockNumber) {
setTimeout(() => {
waitToBeMined(txHash, callback)
}, 10000)
} else {
callback()
}
}
module.exports = DAppsMetadataController

View File

@ -0,0 +1,16 @@
const Email = require('./base-email');
const config = require('../config')
class ApprovalEmail extends Email {
constructor(dapp) {
const emailBody = `A DApp metadata ${JSON.stringify(dapp.details)} has been uploaded. You can connect with the Dapp owner at email: ${dapp.email}`;
super(
config.APPROVE_NOTIFIER_MAIL,
config.APPROVER_MAIL,
`Uploaded DApp Metadata. Hash - ${dapp.hash}`,
emailBody
);
}
}
module.exports = ApprovalEmail;

View File

@ -0,0 +1,34 @@
let nodemailer = require('nodemailer');
const SMTP_CONFIG = require('./smtp-config');
const logger = require('./../logger/logger').getLoggerFor('Base-Email');
class BaseEmail {
constructor(from, to, subject, body) {
this.from = from;
this.to = to;
this.subject = subject;
this.text = body;
}
async send() {
let smtpTransporter = nodemailer.createTransport(SMTP_CONFIG);
smtpTransporter.verify().then(
() => {
smtpTransporter.sendMail(this)
.then(() => {
logger.info(`Email with subject ${this.subject} was delivered successfully`);
})
.catch(mailError => {
logger.error(`Email was not delivered due to ${mailError}`);
});
}
).catch(verificationError => {
logger.error(`Email service verification failed due to ${verificationError}`);
});
}
}
module.exports = BaseEmail;

View File

@ -0,0 +1,11 @@
const config = require('../config')
module.exports = {
host: config.EMAIL_HOST,
port: config.EMAIL_PORT,
secure: config.EMAIL_TLS,
auth: {
user: config.EMAIL_USER,
pass: config.EMAIL_PASSWORD
}
};

View File

@ -0,0 +1,20 @@
const INTERNAL_ERRORS = {
'Error': 'Error',
'MongoError': 'MongoError',
'MongooseError': 'MongooseError'
}
class BadRequestError extends Error {
constructor(error) {
if (INTERNAL_ERRORS[error.constructor.name]) {
super('Bad request');
} else {
// This is supposed to be a custom error
super(error.message);
}
this.statusCode = 400;
}
}
module.exports = BadRequestError;

View File

@ -0,0 +1,27 @@
const TemplateValidationError = require('./template-validation-error');
class TemplatesParser {
static parse(data, template) {
let filteredInput = {};
Object.keys(template).forEach((property) => {
if (template[property].constructor.name === 'Object') {
TemplatesParser.parse(data[property], template[property]);
}
// Checks whether the field is required according to the template
if (template[property] && !data[property] && data[property] != false) {
throw new TemplateValidationError(`${property} field is required`);
}
if (data[property]) {
filteredInput[property] = data[property];
}
});
return filteredInput;
}
}
module.exports = TemplatesParser;

View File

@ -0,0 +1,7 @@
class TemplateValidationError extends Error {
constructor(message) {
super(message);
}
}
module.exports = TemplateValidationError;

View File

@ -0,0 +1,14 @@
{
"UploadingTemplate": {
"email": true,
"metadata": {
"name": true,
"url": true,
"description": true,
"category": true,
"image": true,
"dateAdded": true,
"uploader": true
}
}
}
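Read together with TemplatesParser above, this template means a request body must carry email plus every listed metadata field; nested fields are validated recursively, and only top-level keys named in the template are copied into the parsed result. A small sketch with hypothetical values (require paths taken from the controller's imports):
const TemplateParser = require('./../inputs/template-parser')
const templates = require('./../inputs/templates/dapps-metadata')
const body = {
  email: 'dev@example.com',
  metadata: {
    name: 'My DApp',
    url: 'https://example.com',
    description: 'Example listing',
    category: 'GAMES',
    image: 'data:image/png;base64,...',
    dateAdded: 1578960000,
    uploader: '0x0000000000000000000000000000000000000001',
  },
  extra: 'dropped', // not in the template, so it is filtered out of the result
}
const parsed = TemplateParser.parse(body, templates.UploadingTemplate)
// throws TemplateValidationError('email field is required') if body.email is missing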

55
Backend/logger/logger.js Normal file
View File

@ -0,0 +1,55 @@
const winston = require('winston');
const CloudWatchTransport = require('winston-aws-cloudwatch');
const config = require('../config')
let baseConfig = {
logGroupName: `${config.ENVIRONMENT}-logs`,
createLogGroup: true,
createLogStream: true,
awsConfig: {
accessKeyId: config.CLOUDWATCH_ACCESS_KEY_ID,
secretAccessKey: config.CLOUDWATCH_SECRET_ACCESS_KEY,
region: config.CLOUDWATCH_REGION
}
}
let cloudWatchLogConfig = Object.assign({ logStreamName: `application-logs` }, baseConfig);
let loggerLevels = {
levels: {
error: 1,
warn: 2,
info: 3
}
}
module.exports = {
getLoggerFor: function (context) {
const transports = [new winston.transports.File({ filename: 'logs.log' })];
let logger = winston.createLogger({
levels: loggerLevels.levels,
level: 'info',
format: winston.format.combine(
winston.format.timestamp({
format: 'YYYY-MM-DD HH:mm:ss'
}),
winston.format.printf(log => {
return `[${log.timestamp}]-[${log.level.toUpperCase()}]-[${context}]: ${log.message}`;
})
),
transports: transports
});
if (config.ENVIRONMENT == 'DEV') {
logger.add(new winston.transports.Console());
} else {
// Set the formatting per logger, because we need the context
cloudWatchLogConfig.formatLog = function (log) {
return `[${log.level.toUpperCase()}]-[${context}]: ${log.message}`;
}
logger.add(new CloudWatchTransport(cloudWatchLogConfig));
}
return logger;
},
};

View File

@ -0,0 +1,74 @@
// This only works for express(4.x) with routes set in style ->
// router.#method('url', middleware, action)
// app.use('router url', router);
// #method can be any of the HTTP methods
//
// The errors handler works on an already-routed app
// Its purpose is to handle route and middleware errors
const logger = require('../../logger/logger').getLoggerFor('Global-API-error-handler');
class AppErrorsHandler {
static handleErrorsForApp(app) {
let appStack = app._router.stack;
for (let i = 0; i < appStack.length; i++) {
// Check if middleware
if (!appStack[i].handle.stack) {
appStack[i].handle = buildErrorHandlerForAction(appStack[i].handle);
}
// Check if router
if (appStack[i].handle.stack) {
let appRoute = appStack[i].handle.stack;
wrapEachRouteActionWithErrorHandler(appRoute);
}
}
return app;
}
}
let wrapEachRouteActionWithErrorHandler = function (appRoute, i = 0) {
if (i == appRoute.length) {
return void 0;
}
// Check for nested routes
if (appRoute[i].handle.stack) {
wrapEachRouteActionWithErrorHandler(appRoute[i].handle.stack);
} else {
let routeActions = appRoute[i].route.stack;
for (let j = 0; j < routeActions.length; j++) {
let routeAction = routeActions[j].handle;
routeActions[j].handle = buildErrorHandlerForAction(routeAction);
}
}
return wrapEachRouteActionWithErrorHandler(appRoute, ++i);
}
let buildErrorHandlerForAction = function (routeAction) {
return async function (req, res, next) {
try {
await routeAction(req, res, next);
} catch (error) {
let errorResponse = error.message ? { error: error.message } : error;
// Checks for custom errors
if (error.statusCode) {
res.status(error.statusCode).send(errorResponse);
} else {
logger.error(JSON.stringify(errorResponse));
res.status(500).send({ error: 'Something went wrong' });
}
}
};
}
module.exports = AppErrorsHandler;

View File

@ -0,0 +1,20 @@
let bodyParser = require('body-parser');
class BodyParserMiddleware {
static appendTo(app) {
app.use(bodyParser.urlencoded({ limit: '50mb', extended: true }));
// only json-type requests are valid
app.use(bodyParser.json({
limit: '50mb',
extended: true,
type: function () {
return true;
}
}));
}
}
module.exports = BodyParserMiddleware;

View File

@ -0,0 +1,9 @@
const cors = require('cors');
class CORSOriginMiddleware {
static appendTo(app) {
app.use(cors());
}
}
module.exports = CORSOriginMiddleware;

View File

@ -0,0 +1,40 @@
let helmet = require('helmet');
class HelmetMiddleware {
static appendTo(app) {
// Our API uses only internal sources
// If someone tries to execute an external resource on our API, it won't be executed
// app.use(helmet.contentSecurityPolicy({
// directives: {
// defaultSrc: ["'self'"]
// }
// }));
// Expect-CT protects us from man-in-the-middle attacks over HTTPS
// It forces browsers to check the public CT log for a valid certificate
// TODO: Add a report endpoint for monitoring if somebody tries to hack/mislead us
app.use(helmet.expectCt({
enforce: true,
maxAge: 60 // 1 minute
}));
/* Default setup
1. Turns DNS prefetching off -> the browser no longer resolves domains to addresses ahead of time,
e.g. example.com into 93.184.216.34
2. Nobody except us can put our API in an iframe
3. The X-Powered-By header is hidden, so Express is not advertised in responses
4. Tells browsers to stick with HTTPS and never visit the insecure HTTP version
5. Untrusted HTML files cannot be executed in the context of our API
(this guards against Internet Explorer's download-and-open behaviour)
6. Tells browsers not to sniff MIME types and to trust the declared Content-Type
*/
app.use(helmet());
}
}
module.exports = HelmetMiddleware;

View File

@ -0,0 +1,14 @@
const HelmetMiddleware = require('./helmet');
const CORSOriginsMiddleware = require('./cors');
const BodyParserMiddleware = require('./body-parser');
class MiddlewaresConfigurator {
static config(app) {
HelmetMiddleware.appendTo(app);
BodyParserMiddleware.appendTo(app);
CORSOriginsMiddleware.appendTo(app);
}
}
module.exports = MiddlewaresConfigurator;

View File

@ -0,0 +1,28 @@
const logger = require('../../logger/logger').getLoggerFor('Admin-authorization');
const parseBasicAuthorization = require('../../utils/authorization-utils').parseBasicAuthorization;
const config = require('../../config')
class AdminAuthorizationMiddleware {
static verifyUserAuth(auth) {
return (
auth.username == config.ADMIN_USER &&
auth.password == config.ADMIN_PASSWORD
)
}
static authorize(req, res, next) {
try {
let authorization = parseBasicAuthorization(req.headers.authorization);
if (AdminAuthorizationMiddleware.verifyUserAuth(authorization)) {
return void next();
}
throw new Error('Wrong admin credentials');
} catch (error) {
logger.error(error.message);
res.status(401).send();
}
}
}
module.exports = AdminAuthorizationMiddleware.authorize;

View File

@ -0,0 +1,29 @@
class ResponseCutSensitivesMiddleware {
static cutSensitives(req, res, next) {
res.jsonCutSensitives = function (response, sensitives) {
if (response instanceof Array) {
for (let i = 0; i < response.length; i++) {
response[i] = cutProperties(response[i], sensitives);
}
} else {
response = cutProperties(response, sensitives);
}
res.json(response);
}
next();
}
}
const cutProperties = function (objectToCut, sensitiveProps) {
let copiedResponse = JSON.parse(JSON.stringify(objectToCut));
for (const sensitiveProp of sensitiveProps) {
copiedResponse[sensitiveProp] = undefined;
}
return copiedResponse;
}
module.exports = ResponseCutSensitivesMiddleware.cutSensitives;

View File

@ -0,0 +1,24 @@
const rateLimit = require('express-rate-limit');
const logger = require('./../../logger/logger').getLoggerFor("Rate-Limit");
const config = require('./../../config')
class RateLimitMiddleware {
static setup() {
const windowMs = config.RATE_LIMIT_TIME;
const maxReq = config.RATE_LIMIT_MAX_REQ;
let limiter = rateLimit({
windowMs: windowMs,
max: maxReq,
handler: function (req, res) {
logger.warn(this.message);
res.status(this.statusCode).send({ error: this.message });
},
message: `Rate limit was reached, you are able to do ${maxReq} requests per ${windowMs} milliseconds`
});
return limiter;
}
}
module.exports = RateLimitMiddleware.setup();

View File

@ -0,0 +1,33 @@
const logger = require('../logger/logger').getLoggerFor('DAPPS-Images-Model');
const IPFSService = require('./../services/ipfs-service');
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
let DAppsImageSchema = new Schema({
id: Schema.Types.ObjectId,
content: String,
hash: {
type: String,
unique: true,
}
});
DAppsImageSchema.pre('save', async function () {
const content = this.content.split('base64,')[1];
if (!content) {
throw new Error('Invalid base64 image');
}
const data = Buffer.from(content, 'base64');
const hash = await IPFSService.addContent(data);
this.set({ content, hash });
});
DAppsImageSchema.statics.findByContent = async function (input) {
const content = input.split('base64,')[1];
const data = Buffer.from(content, 'base64');
const hash = await IPFSService.generateContentHash(data);
return this.findOne({ hash });
};
module.exports = mongoose.model('DAppsImage', DAppsImageSchema);

View File

@ -0,0 +1,88 @@
let mongoose = require('mongoose');
let Schema = mongoose.Schema;
const bs58 = require('bs58');
const validator = require('validator');
const dappCategories = require('./../constants/dapp-categories').ALL_CATEGORIES;
const metadataStatuses = require('./../constants/dapp-metadata-statuses').ALL_STATUS;
const IPFSService = require('./../services/ipfs-service');
let DAppsMetadataSchema = new Schema({
id: Schema.Types.ObjectId,
details: {
name: {
type: String,
required: true
},
uploader: {
type: String,
required: true
},
url: {
type: String,
required: true
},
description: {
type: String,
required: true
},
category: {
type: String,
required: true,
enum: dappCategories
},
image: {
type: String,
required: true
},
dateAdded: {
type: Number,
required: true
},
},
email: {
type: String,
required: true,
validate: {
validator: function (value) {
return validator.isEmail(value);
},
message: props => `${props.value} is not a valid email!`
}
},
hash: {
type: String,
unique: true,
},
compressedMetadata: String,
status: {
type: String,
default: "NEW",
enum: metadataStatuses
},
/* This is used for new IPFS hash when DApp changes status */
ipfsHash: String
});
DAppsMetadataSchema.pre('save', async function () {
const hash = await IPFSService.addContent(this.details);
/* We set ipfsHash so even unapproved DApps have it set */
this.set({ hash, ipfsHash: hash });
});
DAppsMetadataSchema.statics.findByPlainMetadata = async function (metadata) {
const hash = await IPFSService.generateContentHash(metadata);
return this.findOne({ hash });
}
DAppsMetadataSchema.statics.findByBytes32Hash = async function (bytes32Hash) {
const hashHex = `1220${bytes32Hash.slice(2)}`;
const hashBytes = Buffer.from(hashHex, 'hex');
const encodedHash = bs58.encode(hashBytes);
return this.findOne({ hash: encodedHash });
}
module.exports = mongoose.model('DAppsMetadata', DAppsMetadataSchema);
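findByBytes32Hash undoes the usual trick for storing an IPFS hash on chain: a CIDv0 is the base58 encoding of a multihash whose first two bytes (0x12 for sha2-256, 0x20 for a 32-byte digest) are stripped so that the remaining 32 bytes fit a bytes32 slot. A standalone sketch of the round trip (hypothetical values):
const bs58 = require('bs58')
// base58 CID -> bytes32, e.g. for Discover.createDApp / setMetadata
function cidToBytes32(cid) {
  const bytes = bs58.decode(cid) // 34 bytes: 0x12 0x20 + digest
  return `0x${bytes.slice(2).toString('hex')}`
}
// bytes32 -> base58 CID, mirroring findByBytes32Hash above
function bytes32ToCid(bytes32Hash) {
  return bs58.encode(Buffer.from(`1220${bytes32Hash.slice(2)}`, 'hex'))
}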

35
Backend/package.json Normal file
View File

@ -0,0 +1,35 @@
{
"name": "status-discover-backend",
"version": "1.0.0",
"description": "Discover dapps backend",
"main": "server.js",
"scripts": {
"build": "",
"start": "nodemon server.js",
"start:dev": "node ./server.js",
"start:prod": "node ./server.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "Status",
"license": "ISC",
"dependencies": {
"body-parser": "1.19.0",
"bs58": "^4.0.1",
"cors": "2.8.5",
"dotenv": "8.0.0",
"embarkjs": "4.0.2",
"embarkjs-connector-web3": "^4.0.0",
"express": "4.17.1",
"express-rate-limit": "^4.0.3",
"helmet": "3.18.0",
"ipfs-http-client": "32.0.1",
"mongoose": "5.7.5",
"nodemailer": "6.3.0",
"nodemon": "1.19.1",
"validator": "11.0.0",
"web3": "^1.0.0-beta.55",
"web3-utils": "^1.0.0-beta.55",
"winston": "3.2.1",
"winston-aws-cloudwatch": "3.0.0"
}
}

View File

@ -0,0 +1,14 @@
const express = require('express');
const DAppsRoute = require('./dapps-routes');
class APIRouter {
static route(app) {
app.use('/metadata', DAppsRoute.build(express));
/* for ElasticBeanstalk Load Balancer healthcheck */
app.use('/healthcheck', async (req, res) => res.send('OK'))
}
}
module.exports = APIRouter;

View File

@ -0,0 +1,32 @@
const DAppsMetadataController = require('../controllers/dapps-metadata-controller');
const rateLimit = require('../middlewares/route-specifics/rate-limit');
const cutSensitives = require('../middlewares/route-specifics/cut-sensitives');
const adminAuthorization = require('../middlewares/route-specifics/admin-authorization');
class DappRoute {
static build(expressApp) {
let dappRoute = expressApp.Router();
dappRoute.post('/', rateLimit, DAppsMetadataController.uploadDAppMetadata);
dappRoute.post('/update/:dappId', DAppsMetadataController.setMetadataStatus);
dappRoute.post('/reject/:hash', adminAuthorization, DAppsMetadataController.rejectDApp);
dappRoute.post('/approve/email/:hash', DAppsMetadataController.sendApprovalEmail);
dappRoute.post('/approve/:hash', adminAuthorization, DAppsMetadataController.approveDApp);
dappRoute.get('/all', cutSensitives, DAppsMetadataController.getAllDappsMetadata);
dappRoute.get('/:hash', cutSensitives, DAppsMetadataController.getDAppMetadata);
dappRoute.get('/image/:hash', cutSensitives, DAppsMetadataController.getDAppImage);
return dappRoute;
}
}
module.exports = DappRoute;
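With these routes mounted under /metadata by api-router.js and the default PORT of 4000 from the config, uploading metadata is a plain JSON POST whose body shape matches the UploadingTemplate above. A rough sketch, assuming a Node version with a global fetch (any HTTP client works the same way; values are hypothetical):
const res = await fetch('http://localhost:4000/metadata', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    email: 'dev@example.com',
    metadata: {
      name: 'My DApp',
      url: 'https://example.com',
      description: 'Example listing',
      category: 'GAMES', // must be one of the ALL_CATEGORIES constants
      image: 'data:image/png;base64,...', // base64 data URI, stored via DAppImageService
      dateAdded: Date.now(),
      uploader: '0x0000000000000000000000000000000000000001',
    },
  }),
})
const { hash } = await res.json() // IPFS hash of the stored metadata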

51
Backend/server.js Normal file
View File

@ -0,0 +1,51 @@
const path = require('path')
const config = require('./config')
function setupSystem() {
let dotenv = require("dotenv");
dotenv.config();
let DBConfig = require('./config/db-config');
DBConfig.config();
}
async function setupAPI() {
let express = require("express");
let app = express();
/* accept headers like x-forwarded-proto from proxies */
app.set('trust proxy', 'loopback')
setupPreRoutedAppMiddlewares(app);
let APIRouter = require('./routes/api-router');
APIRouter.route(app);
setupPostRoutedAppMiddlewares(app);
app.use(express.static(
path.join(__dirname, '/frontend'),
{ maxAge: '30 days' }
));
/* Handles any requests that don't match the ones above */
app.get('*', (req,res) =>{
res.sendFile(path.join(__dirname, 'frontend/index.html'));
});
app.listen(config.PORT);
console.log(`Server started on port: ${config.PORT}`);
return app;
}
let setupPreRoutedAppMiddlewares = function (app) {
require('./middlewares/globals/middleswares-configurator').config(app);
}
let setupPostRoutedAppMiddlewares = function (app) {
require('./middlewares/globals/api-error-handler').handleErrorsForApp(app);
}
setupSystem();
module.exports = setupAPI();

View File

@ -0,0 +1,30 @@
const DAppImage = require('./../models/dapps-images-model');
class DAppImageService {
static async upload(req, image) {
try {
const uploadedImage = await DAppImage.create({ content: image });
return buildImageUrl(req, uploadedImage.hash);
} catch (error) {
// Code 11000 is because of uniqueness, so just return the already existing document
if (error.code == 11000) {
const existingImage = await DAppImage.findByContent(image);
return buildImageUrl(req, existingImage.hash);
}
throw new Error(error.message);
}
}
static async retrieveImage(imageHash) {
return DAppImage.findOne({ 'hash': imageHash });
}
}
const buildImageUrl = function (req, imageHash) {
return `/metadata/image/${imageHash}`;
}
module.exports = DAppImageService;

View File

@ -0,0 +1,45 @@
const validator = require('validator')
const web3Utils = require('web3-utils')
const DAppMetadata = require('./../models/dapps-metadata-model')
const DAppImageService = require('./../services/dapp-image-service')
class DAppMetadataService {
static async upload(req, details) {
try {
if (!validator.isURL(details.metadata.url, { require_protocol: true })) {
throw new Error(`Invalid url: ${details.metadata.url}`)
}
if (!web3Utils.isAddress(details.metadata.uploader)) {
throw new Error(
`Metadata uploader [${details.metadata.uploader}] is not a valid address`,
)
}
const compressedMetadata = web3Utils.keccak256(
JSON.stringify(details.metadata),
)
details.metadata.image = await DAppImageService.upload(
req,
details.metadata.image,
)
const dappMetadata = await DAppMetadata.create({
details: details.metadata,
compressedMetadata,
email: details.email,
})
return dappMetadata
} catch (error) {
// Code 11000 is because of uniqueness, so just return the already existing document
if (error.code == 11000) {
return DAppMetadata.findByPlainMetadata(details.metadata)
}
throw new Error(error.message)
}
}
}
module.exports = DAppMetadataService
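The compressedMetadata stored here is the keccak256 of the metadata JSON, and the controller's setMetadataStatus queries documents by that same value through the /update/:dappId route, so it appears to double as the bytes32 DApp id used on chain. A short sketch of recomputing it off-band (hypothetical metadata object):
const web3Utils = require('web3-utils')
const metadata = { name: 'My DApp', url: 'https://example.com' /* ... */ }
// same derivation as DAppMetadataService.upload above
const dappId = web3Utils.keccak256(JSON.stringify(metadata))
// e.g. usable as _id in Discover.createDApp and as :dappId in POST /metadata/update/:dappId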

View File

@ -0,0 +1,38 @@
const DiscoverContract = require('../blockchain/discover-contract');
const logger = require('./../logger/logger').getLoggerFor('Discover-Service');
const ACCOUNT = '0x0000000000000000000000000000000000000000';
class DiscoverService {
static async retrieveDApp(id) {
try {
const dappIndex = await DiscoverContract.methods
.id2index(id)
.call({ from: ACCOUNT });
const dapp = await DiscoverContract.methods
.dapps(dappIndex)
.call({ from: ACCOUNT });
if (dapp.id != id) {
throw new Error('Error fetching correct data from contract')
}
return dapp;
} catch (error) {
logger.error(error.message);
throw new Error(`A dapp with id [${id}] is not found in the contract`);
}
}
static async hasStaked(dappId) {
const dapp = await DiscoverService.retrieveDApp(dappId);
return dapp.effectiveBalance > 0;
}
}
module.exports = DiscoverService;

View File

@ -0,0 +1,39 @@
const ipfsClient = require('ipfs-http-client');
const logger = require('../logger/logger').getLoggerFor('IPFS-Service');
const config = require('../config')
class IPFSService {
constructor() {
if (!IPFSService.instance) {
this.storage = ipfsClient(
config.IPFS_HOST,
config.IPFS_PORT,
{ protocol: config.IPFS_PROTOCOL }
)
IPFSService.instance = this;
}
return IPFSService.instance;
}
async addContent(content, filename='data.json') {
let data
if (Buffer.isBuffer(content)) {
data = content
} else if (typeof content == "object") {
data = Buffer.from(JSON.stringify(content));
} else {
data = Buffer.from(content);
}
const resp = await this.storage.add(data, {pin: true});
logger.info(`Content uploaded to IPFS: ${resp[0].hash}`);
return resp[0].hash;
}
async generateContentHash(content) {
return this.addContent(content);
}
}
module.exports = new IPFSService();
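addContent normalises Buffers, plain objects and strings into a Buffer before pinning, so the same logical content always maps to the same hash; generateContentHash simply reuses it (and therefore also pins). A short usage sketch, inside some async context:
const IPFSService = require('./ipfs-service') // exported as a singleton
const hash = await IPFSService.addContent({ hello: 'discover' })
const again = await IPFSService.generateContentHash({ hello: 'discover' })
// same object -> same JSON -> same hash, so hash === again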

View File

@ -0,0 +1,16 @@
module.exports = {
parseBasicAuthorization: function (authHeader) {
if (!authHeader) {
throw new Error('Authorization not provided');
}
let authString = authHeader.split(/\s/)[1];
let stringifiedAuth = Buffer.from(authString, 'base64').toString();
let authParts = stringifiedAuth.split(':');
return {
username: authParts[0],
password: authParts[1]
}
}
}
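A quick usage sketch of the helper above, using the admin credentials defaulted in Backend/config/index.js (require path assumed relative to Backend/utils):
const { parseBasicAuthorization } = require('./authorization-utils')
const header = `Basic ${Buffer.from('admin:discoverbancor').toString('base64')}`
const auth = parseBasicAuthorization(header)
// auth -> { username: 'admin', password: 'discoverbancor' }
// parseBasicAuthorization(undefined) throws 'Authorization not provided'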

6216
Backend/yarn.lock Normal file

File diff suppressed because it is too large

7
Contracts/.env.example Normal file
View File

@ -0,0 +1,7 @@
NODE_ENV=localhost
WALLET_PASSWORD=dev_password
WALLET_MNEMONIC=erupt point century seek certain escape solution flee elegant hard please pen
EMBARK_TARGET=testnet
DB_NAME=status_discover
DB_STORE_DAPPS=store_dapps

View File

@ -0,0 +1,423 @@
pragma solidity ^0.5.2;
import "./token/MiniMeTokenInterface.sol";
import "./token/ApproveAndCallFallBack.sol";
import "./utils/SafeMath.sol";
import "./utils/BancorFormula.sol";
import "./common/Controlled.sol";
contract Discover is Controlled, ApproveAndCallFallBack, BancorFormula {
using SafeMath for uint;
// Could be any MiniMe token
MiniMeTokenInterface SNT;
// Total SNT in circulation
uint public total;
// Parameter to calculate Max SNT any one DApp can stake
uint public ceiling;
// The max amount of tokens it is possible to stake, as a percentage of the total in circulation
uint public max;
// Decimal precision for this contract
uint public decimals;
// Prevents overflows in votesMinted
uint public safeMax;
// Whether we need more than an id param to identify arbitrary data must still be discussed.
struct Data {
address developer;
bytes32 id;
bytes32 metadata;
uint balance;
uint rate;
uint available;
uint votesMinted;
uint votesCast;
uint effectiveBalance;
}
Data[] public dapps;
mapping(bytes32 => uint) public id2index;
mapping(bytes32 => bool) public existingIDs;
event DAppCreated(bytes32 indexed id, uint newEffectiveBalance);
event Upvote(bytes32 indexed id, uint newEffectiveBalance);
event Downvote(bytes32 indexed id, uint newEffectiveBalance);
event Withdraw(bytes32 indexed id, uint newEffectiveBalance);
event MetadataUpdated(bytes32 indexed id);
event CeilingUpdated(uint oldCeiling, uint newCeiling);
constructor(MiniMeTokenInterface _SNT) public {
SNT = _SNT;
total = 6804870174;
ceiling = 292; // See here for more: https://observablehq.com/@andytudhope/dapp-store-snt-curation-mechanism
decimals = 1000000; // 4 decimal points for %, 2 because we only use 1/100th of total in circulation
max = total.mul(ceiling).div(decimals);
safeMax = uint(77).mul(max).div(100); // Limited by accuracy of BancorFormula
}
/**
* @dev Update ceiling
* @param _newCeiling New ceiling value
*/
function setCeiling(uint _newCeiling) external onlyController {
emit CeilingUpdated(ceiling, _newCeiling);
ceiling = _newCeiling;
max = total.mul(ceiling).div(decimals);
safeMax = uint(77).mul(max).div(100);
}
/**
* @dev Anyone can create a DApp (i.e. an arbitrary piece of data this contract happens to care about).
* @param _id bytes32 unique identifier.
* @param _amount of tokens to stake on initial ranking.
* @param _metadata metadata hex string
*/
function createDApp(bytes32 _id, uint _amount, bytes32 _metadata) external {
_createDApp(
msg.sender,
_id,
_amount,
_metadata);
}
/**
* @dev Sends SNT directly to the contract, not the developer. This gets added to the DApp's balance, no curve required.
* @param _id bytes32 unique identifier.
* @param _amount of tokens to stake on DApp's ranking. Used for upvoting + staking more.
*/
function upvote(bytes32 _id, uint _amount) external {
_upvote(msg.sender, _id, _amount);
}
/**
* @dev Sends SNT to the developer and lowers the DApp's effective balance by 1%
* @param _id bytes32 unique identifier.
* @param _amount uint, included for approveAndCallFallBack
*/
function downvote(bytes32 _id, uint _amount) external {
_downvote(msg.sender, _id, _amount);
}
/**
* @dev Developers can withdraw an amount not more than what was available of the
SNT they originally staked minus what they have already received back in downvotes.
* @param _id bytes32 unique identifier.
* @return max SNT that can be withdrawn == available SNT for DApp.
*/
function withdrawMax(bytes32 _id) external view returns(uint) {
Data storage d = _getDAppById(_id);
return d.available;
}
/**
* @dev Developers can withdraw an amount not more than what was available of the
SNT they originally staked minus what they have already received back in downvotes.
* @param _id bytes32 unique identifier.
* @param _amount of tokens to withdraw from DApp's overall balance.
*/
function withdraw(bytes32 _id, uint _amount) external {
Data storage d = _getDAppById(_id);
uint256 tokensQuantity = _amount.div(1 ether);
require(msg.sender == d.developer, "Only the developer can withdraw SNT staked on this data");
require(tokensQuantity <= d.available, "You can only withdraw a percentage of the SNT staked, less what you have already received");
uint precision;
uint result;
d.balance = d.balance.sub(tokensQuantity);
d.rate = decimals.sub(d.balance.mul(decimals).div(max));
d.available = d.balance.mul(d.rate);
(result, precision) = BancorFormula.power(
d.available,
decimals,
uint32(decimals),
uint32(d.rate));
d.votesMinted = result >> precision;
if (d.votesCast > d.votesMinted) {
d.votesCast = d.votesMinted;
}
uint temp1 = d.votesCast.mul(d.rate).mul(d.available);
uint temp2 = d.votesMinted.mul(decimals).mul(decimals);
uint effect = temp1.div(temp2);
d.effectiveBalance = d.balance.sub(effect);
require(SNT.transfer(d.developer, _amount), "Transfer failed");
emit Withdraw(_id, d.effectiveBalance);
}
/**
* @dev Set the metadata for the DApp
* @param _id bytes32 unique identifier.
* @param _metadata metadata info
*/
function setMetadata(bytes32 _id, bytes32 _metadata) external {
uint dappIdx = id2index[_id];
Data storage d = dapps[dappIdx];
require(d.developer == msg.sender, "Only the developer can update the metadata");
d.metadata = _metadata;
emit MetadataUpdated(_id);
}
/**
* @dev Used in UI in order to fetch all dapps
* @return dapps count
*/
function getDAppsCount() external view returns(uint) {
return dapps.length;
}
/**
* @notice Support for "approveAndCall".
* @param _from Who approved.
* @param _amount Amount being approved; needs to equal the amount encoded in `_data`.
* @param _token Token being approved, needs to be `SNT`.
* @param _data Abi encoded data with the selector of `createDApp(bytes32,uint256,bytes32)`, `upvote(bytes32,uint256)` or `downvote(bytes32,uint256)`.
*/
function receiveApproval(
address _from,
uint256 _amount,
address _token,
bytes calldata _data
)
external
{
require(_token == address(SNT), "Wrong token");
require(_token == address(msg.sender), "Wrong account");
require(_data.length <= 196, "Incorrect data");
bytes4 sig;
bytes32 id;
uint256 amount;
bytes32 metadata;
(sig, id, amount, metadata) = abiDecodeRegister(_data);
require(_amount == amount, "Wrong amount");
if (sig == bytes4(0x7e38d973)) {
_createDApp(
_from,
id,
amount,
metadata);
} else if (sig == bytes4(0xac769090)) {
_downvote(_from, id, amount);
} else if (sig == bytes4(0x2b3df690)) {
_upvote(_from, id, amount);
} else {
revert("Wrong method selector");
}
}
/**
* @dev Used in UI to display effect on ranking of user's donation
* @param _id bytes32 unique identifier.
* @param _amount of tokens to stake/"donate" to this DApp's ranking.
* @return effect of donation on DApp's effectiveBalance
*/
function upvoteEffect(bytes32 _id, uint _amount) external view returns(uint effect) {
Data memory d = _getDAppById(_id);
require(d.balance.add(_amount) <= safeMax, "You cannot upvote by this much, try with a lower amount");
// Special case - no downvotes yet cast
if (d.votesCast == 0) {
return _amount;
}
uint precision;
uint result;
uint mBalance = d.balance.add(_amount);
uint mRate = decimals.sub(mBalance.mul(decimals).div(max));
uint mAvailable = mBalance.mul(mRate);
(result, precision) = BancorFormula.power(
mAvailable,
decimals,
uint32(decimals),
uint32(mRate));
uint mVMinted = result >> precision;
uint temp1 = d.votesCast.mul(mRate).mul(mAvailable);
uint temp2 = mVMinted.mul(decimals).mul(decimals);
uint mEffect = temp1.div(temp2);
uint mEBalance = mBalance.sub(mEffect);
return (mEBalance.sub(d.effectiveBalance));
}
/**
* @dev Downvotes always remove 1% of the current ranking.
* @param _id bytes32 unique identifier.
* @return balance_down_by, votes_required, cost
*/
function downvoteCost(bytes32 _id) external view returns(uint b, uint vR, uint c) {
Data memory d = _getDAppById(_id);
return _downvoteCost(d);
}
function _createDApp(
address _from,
bytes32 _id,
uint _amount,
bytes32 _metadata
)
internal
{
require(!existingIDs[_id], "You must submit a unique ID");
uint256 tokensQuantity = _amount.div(1 ether);
require(tokensQuantity > 0, "You must spend some SNT to submit a ranking in order to avoid spam");
require (tokensQuantity <= safeMax, "You cannot stake more SNT than the ceiling dictates");
uint dappIdx = dapps.length;
dapps.length++;
Data storage d = dapps[dappIdx];
d.developer = _from;
d.id = _id;
d.metadata = _metadata;
uint precision;
uint result;
d.balance = tokensQuantity;
d.rate = decimals.sub((d.balance).mul(decimals).div(max));
d.available = d.balance.mul(d.rate);
(result, precision) = BancorFormula.power(
d.available,
decimals,
uint32(decimals),
uint32(d.rate));
d.votesMinted = result >> precision;
d.votesCast = 0;
d.effectiveBalance = tokensQuantity;
id2index[_id] = dappIdx;
existingIDs[_id] = true;
require(SNT.transferFrom(_from, address(this), _amount), "Transfer failed");
emit DAppCreated(_id, d.effectiveBalance);
}
function _upvote(address _from, bytes32 _id, uint _amount) internal {
uint256 tokensQuantity = _amount.div(1 ether);
require(tokensQuantity > 0, "You must send some SNT in order to upvote");
Data storage d = _getDAppById(_id);
require(d.balance.add(tokensQuantity) <= safeMax, "You cannot upvote by this much, try with a lower amount");
uint precision;
uint result;
d.balance = d.balance.add(tokensQuantity);
d.rate = decimals.sub((d.balance).mul(decimals).div(max));
d.available = d.balance.mul(d.rate);
(result, precision) = BancorFormula.power(
d.available,
decimals,
uint32(decimals),
uint32(d.rate));
d.votesMinted = result >> precision;
uint temp1 = d.votesCast.mul(d.rate).mul(d.available);
uint temp2 = d.votesMinted.mul(decimals).mul(decimals);
uint effect = temp1.div(temp2);
d.effectiveBalance = d.balance.sub(effect);
require(SNT.transferFrom(_from, address(this), _amount), "Transfer failed");
emit Upvote(_id, d.effectiveBalance);
}
function _downvote(address _from, bytes32 _id, uint _amount) internal {
uint256 tokensQuantity = _amount.div(1 ether);
Data storage d = _getDAppById(_id);
(uint b, uint vR, uint c) = _downvoteCost(d);
require(tokensQuantity == c, "Incorrect amount: valid iff effect on ranking is 1%");
d.available = d.available.sub(tokensQuantity);
d.votesCast = d.votesCast.add(vR);
d.effectiveBalance = d.effectiveBalance.sub(b);
require(SNT.transferFrom(_from, d.developer, _amount), "Transfer failed");
emit Downvote(_id, d.effectiveBalance);
}
function _downvoteCost(Data memory d) internal view returns(uint b, uint vR, uint c) {
uint balanceDownBy = (d.effectiveBalance.div(100));
uint votesRequired = (balanceDownBy.mul(d.votesMinted).mul(d.rate)).div(d.available);
uint votesAvailable = d.votesMinted.sub(d.votesCast).sub(votesRequired);
uint temp = (d.available.div(votesAvailable)).mul(votesRequired);
uint cost = temp.div(decimals);
return (balanceDownBy, votesRequired, cost);
}
/**
* @dev Used internally in order to get a dapp while checking if it exists
* @return existing dapp
*/
function _getDAppById(bytes32 _id) internal view returns(Data storage d) {
uint dappIdx = id2index[_id];
d = dapps[dappIdx];
require(d.id == _id, "Error fetching correct data");
}
/**
* @dev Decodes abi encoded data carrying a 4-byte selector followed by (bytes32 id, uint256 amount, bytes32 metadata).
* @param _data Abi encoded data.
* @return Decoded registry call.
*/
function abiDecodeRegister(
bytes memory _data
)
private
pure
returns(
bytes4 sig,
bytes32 id,
uint256 amount,
bytes32 metadata
)
{
assembly {
sig := mload(add(_data, add(0x20, 0)))
id := mload(add(_data, 36))
amount := mload(add(_data, 68))
metadata := mload(add(_data, 100))
}
}
}
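The ranking maths above is fixed-point: BancorFormula.power returns (result, precision) such that votesMinted = result >> precision, i.e. approximately (available / decimals) raised to the power (decimals / rate). A floating-point JavaScript sketch of the same curve, handy for sanity-checking upvoteEffect off chain (approximate only, it ignores integer truncation):
// All quantities are whole SNT, since the contract divides _amount by 1 ether first
const total = 6804870174
const ceiling = 292
const decimals = 1000000
const max = (total * ceiling) / decimals
function curve(balance, votesCast) {
  const rate = decimals - (balance * decimals) / max
  const available = balance * rate
  const votesMinted = Math.pow(available / decimals, decimals / rate)
  const effect = (votesCast * rate * available) / (votesMinted * decimals * decimals)
  return { rate, available, votesMinted, effectiveBalance: balance - effect }
}
// upvoteEffect(_id, amount) ~= curve(balance + amount, votesCast).effectiveBalance
//                              - currentEffectiveBalance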

View File

@ -0,0 +1,145 @@
pragma solidity ^0.5.2;
import "./kyber/KyberNetworkProxy.sol";
import "./Discover.sol";
import "./token/ERC20Token.sol";
contract DiscoverKyberSwap is Controlled {
address public SNT;
address public ETH;
KyberNetworkProxy public kyberProxy;
Discover public discover;
address public walletId;
uint public maxSlippage;
/**
* @param _discover Discover contract address
* @param _kyberProxy Kyber Network Proxy address
* @param _ETH Kyber ETH address
* @param _SNT Kyber SNT address
* @param _walletId Wallet for Kyber network fees
* @param _maxSlippage Max slippage rate
*/
constructor(address _discover, address _kyberProxy, address _ETH, address _SNT, address _walletId, uint _maxSlippage) public {
require(_maxSlippage < 100);
discover = Discover(_discover);
kyberProxy = KyberNetworkProxy(_kyberProxy);
ETH = _ETH;
SNT = _SNT;
walletId = _walletId;
maxSlippage = _maxSlippage;
}
/**
* @notice Gets the conversion rate for the destToken given the srcQty.
* @param srcToken source token contract address
* @param srcQty amount of source tokens
* @return exchange rate
*/
function getConversionRates(address srcToken, uint srcQty) public view returns (uint expectedRate, uint slippageRate)
{
if(srcToken == address(0)){
srcToken = ETH;
}
(expectedRate, slippageRate) = kyberProxy.getExpectedRate(srcToken, SNT, srcQty);
require(expectedRate > 0);
}
/**
* @notice Upvote in discover
* @dev Requires a msg.value if using ETH
* @param _id Id to upvote
* @param _token Token to convert to SNT (see https://developer.kyber.network/docs/Environments-Intro/). Address 0 can be used for ETH too
* @param _amount Amount of tokens/eth to convert
*/
function upvote(bytes32 _id, address _token, uint _amount) public payable {
uint sntAmount = _tradeTokens(_token, _amount);
discover.upvote(_id, sntAmount);
}
/**
* @notice Downvote in discover
* @dev Requires a msg.value if using ETH
* @param _id Id to upvote
* @param _token Token to convert to SNT (see https://developer.kyber.network/docs/Environments-Intro/). Address 0 can be used for ETH too
* @param _amount Amount of tokens/eth to convert
*/
function downvote(bytes32 _id, address _token, uint _amount) public payable {
uint sntAmount = _tradeTokens(_token, _amount);
discover.downvote(_id, sntAmount);
}
/**
* @dev Trades tokens/ETH to SNT using Kyber
* @param _token Token to convert to SNT (see https://developer.kyber.network/docs/Environments-Intro/). Address 0 can be used for ETH too
* @param _amount Amount of tokens/eth to convert
* @return Amount of SNT received from the conversion
*/
function _tradeTokens(address _token, uint _amount) internal returns(uint sntAmount) {
uint minConversionRate;
uint slippageRate;
uint slippagePercent;
ERC20Token sntToken = ERC20Token(SNT);
if (_token == address(0) || _token == ETH) {
require(msg.value == _amount, "Not enough ETH");
(minConversionRate, slippageRate) = getConversionRates(ETH, _amount);
slippagePercent = 100 - ((slippageRate * 100) / minConversionRate);
require(slippagePercent <= maxSlippage);
sntAmount = kyberProxy.trade.value(_amount)(ETH, _amount, SNT, address(this), 0 - uint256(1), minConversionRate, walletId);
} else {
ERC20Token t = ERC20Token(_token);
// Initially transfer the tokens from the user to this contract
require(t.transferFrom(msg.sender, address(this), _amount));
if (_token != SNT) {
// Mitigate ERC20 Approve front-running attack, by initially setting allowance to 0
require(t.approve(address(kyberProxy), 0), "Could not reset token approval");
// Set the spender's token allowance to tokenQty
require(t.approve(address(kyberProxy), _amount), "Could not approve token amount");
(minConversionRate, slippageRate) = getConversionRates(_token, _amount);
slippagePercent = 100 - ((slippageRate * 100) / minConversionRate);
require(slippagePercent <= maxSlippage);
sntAmount = kyberProxy.trade(_token, _amount, SNT, address(this), 0 - uint256(1), minConversionRate, walletId);
} else {
sntAmount = _amount;
}
}
require(sntAmount != 0, "Not enough SNT for vote");
require(sntToken.approve(address(discover), 0), "Could not reset SNT approval");
require(sntToken.approve(address(discover), sntAmount), "Could not approve SNT amount");
}
event WalletIdChanged(address sender, address prevWalletId, address newWalletId);
/**
* @dev Changes the walletId address (for the fee sharing program)
* @param _walletId New walletId address
*/
function setWalletId(address _walletId) external onlyController {
emit WalletIdChanged(msg.sender, walletId, _walletId);
walletId = _walletId;
}
event SlippageUpdated(uint maxSlippage);
/**
* @param _maxSlippage Maximum allowed slippage, as a percentage
*/
function setSlippage(uint _maxSlippage) public onlyController {
require(_maxSlippage < 100);
maxSlippage = _maxSlippage;
emit SlippageUpdated(_maxSlippage);
}
}

View File

@ -0,0 +1,23 @@
pragma solidity ^0.5.2;
contract Controlled {
/// @notice The address of the controller is the only address that can call
/// a function with this modifier
modifier onlyController {
require(msg.sender == controller, "Unauthorized");
_;
}
address payable public controller;
constructor() internal {
controller = msg.sender;
}
/// @notice Changes the controller of the contract
/// @param _newController The new controller of the contract
function changeController(address payable _newController) external onlyController {
controller = _newController;
}
}

View File

@ -0,0 +1,67 @@
pragma solidity >=0.5.0 <0.6.0;
/**
* @title KyberNetworkProxy
* @dev Mock of the KyberNetworkProxy. Only used in development
*/
contract KyberNetworkProxy {
constructor() public {
}
/**
* @dev Get a mocked up rate for the trade
*/
function getExpectedRate(
address /* src */,
address /* dest */,
uint /* srcQty */
)
public pure
returns(uint expectedRate, uint slippageRate)
{
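// Fixed mock rates (18-decimal fixed point): ~32.749 expected, ~31.767 worst case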
return (32749000000000000000, 31766530000000000000);
}
/// @notice use token address ETH_TOKEN_ADDRESS for ether
/// @dev makes a trade between src and dest token and send dest token to destAddress
/// @param maxDestAmount A limit on the amount of dest tokens
/// @return amount of actual dest tokens
function trade(
address /* src */,
uint /* srcAmount */,
address /* dest */,
address /* destAddress */,
uint maxDestAmount,
uint /* minConversionRate */,
address /* walletId */
)
public
payable
returns(uint)
{
return maxDestAmount;
}
/// @dev makes a trade between src and dest token and send dest tokens to msg sender
/// @return amount of actual dest tokens
function swapTokenToToken(
address /* src */,
uint /* srcAmount */,
address /* dest */,
uint /* minConversionRate */
)
public pure
returns(uint)
{
return 100;
}
/// @dev makes a trade from Ether to token. Sends token to msg sender
/// @return amount of actual dest tokens
function swapEtherToToken(
address /* token */,
uint /* minConversionRate */
) public payable returns(uint) {
return 200;
}
}

View File

@ -0,0 +1,44 @@
pragma solidity ^0.5.2;
import "../utils/BancorFormula.sol";
contract TestBancorFormula is BancorFormula {
function powerTest(
uint256 _baseN,
uint256 _baseD,
uint32 _expN,
uint32 _expD)
external view returns (uint256, uint8)
{
return super.power(
_baseN,
_baseD,
_expN,
_expD);
}
function generalLogTest(uint256 x) external pure returns (uint256) {
return super.generalLog(x);
}
function floorLog2Test(uint256 _n) external pure returns (uint8) {
return super.floorLog2(_n);
}
function findPositionInMaxExpArrayTest(uint256 _x) external view returns (uint8) {
return super.findPositionInMaxExpArray(_x);
}
function generalExpTest(uint256 _x, uint8 _precision) external pure returns (uint256) {
return super.generalExp(_x, _precision);
}
function optimalLogTest(uint256 x) external pure returns (uint256) {
return super.optimalLog(x);
}
function optimalExpTest(uint256 x) external pure returns (uint256) {
return super.optimalExp(x);
}
}

View File

@ -0,0 +1,10 @@
pragma solidity ^0.5.2;
contract ApproveAndCallFallBack {
function receiveApproval(
address from,
uint256 _amount,
address _token,
bytes calldata _data) external;
}

View File

@ -0,0 +1,53 @@
pragma solidity ^0.5.2;
// Abstract contract for the full ERC 20 Token standard
// https://github.com/ethereum/EIPs/issues/20
interface ERC20Token {
/**
* @notice send `_value` token to `_to` from `msg.sender`
* @param _to The address of the recipient
* @param _value The amount of token to be transferred
* @return Whether the transfer was successful or not
*/
function transfer(address _to, uint256 _value) external returns (bool success);
/**
* @notice `msg.sender` approves `_spender` to spend `_value` tokens
* @param _spender The address of the account able to transfer the tokens
* @param _value The amount of tokens to be approved for transfer
* @return Whether the approval was successful or not
*/
function approve(address _spender, uint256 _value) external returns (bool success);
/**
* @notice send `_value` token to `_to` from `_from` on the condition it is approved by `_from`
* @param _from The address of the sender
* @param _to The address of the recipient
* @param _value The amount of token to be transferred
* @return Whether the transfer was successful or not
*/
function transferFrom(address _from, address _to, uint256 _value) external returns (bool success);
/**
* @param _owner The address from which the balance will be retrieved
* @return The balance
*/
function balanceOf(address _owner) external view returns (uint256 balance);
/**
* @param _owner The address of the account owning tokens
* @param _spender The address of the account able to transfer the tokens
* @return Amount of remaining tokens allowed to be spent
*/
function allowance(address _owner, address _spender) external view returns (uint256 remaining);
/**
* @notice return total supply of tokens
*/
function totalSupply() external view returns (uint256 supply);
event Transfer(address indexed _from, address indexed _to, uint256 _value);
event Approval(address indexed _owner, address indexed _spender, uint256 _value);
}

View File

@ -0,0 +1,634 @@
pragma solidity ^0.5.2;
/*
Copyright 2016, Jordi Baylina
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* @title MiniMeToken Contract
* @author Jordi Baylina
* @dev This token contract's goal is to make it easy for anyone to clone this
* token using the token distribution at a given block; this will allow DAOs
* and DApps to upgrade their features in a decentralized manner without
* affecting the original token
* @dev It is ERC20 compliant, but still needs to undergo further testing.
*/
import "../common/Controlled.sol";
import "./TokenController.sol";
import "./ApproveAndCallFallBack.sol";
import "./MiniMeTokenInterface.sol";
import "./TokenFactory.sol";
/**
* @dev The actual token contract, the default controller is the msg.sender
* that deploys the contract, so usually this token will be deployed by a
* token controller contract, which Giveth will call a "Campaign"
*/
contract MiniMeToken is MiniMeTokenInterface, Controlled {
string public name; //The Token's name: e.g. DigixDAO Tokens
uint8 public decimals; //Number of decimals of the smallest unit
string public symbol; //An identifier: e.g. REP
string public constant VERSION = "MMT_0.1"; //An arbitrary versioning scheme
/**
* @dev `Checkpoint` is the structure that attaches a block number to a
* given value, the block number attached is the one that last changed the
* value
*/
struct Checkpoint {
// `fromBlock` is the block number that the value was generated from
uint128 fromBlock;
// `value` is the amount of tokens at a specific block number
uint128 value;
}
// `parentToken` is the Token address that was cloned to produce this token;
// it will be 0x0 for a token that was not cloned
MiniMeToken public parentToken;
// `parentSnapShotBlock` is the block number from the Parent Token that was
// used to determine the initial distribution of the Clone Token
uint public parentSnapShotBlock;
// `creationBlock` is the block number that the Clone Token was created
uint public creationBlock;
// `balances` is the map that tracks the balance of each address; in this
// contract, when the balance changes, the block number at which the change
// occurred is also included in the map
mapping (address => Checkpoint[]) balances;
// `allowed` tracks any extra transfer rights as in all ERC20 tokens
mapping (address => mapping (address => uint256)) allowed;
// Tracks the history of the `totalSupply` of the token
Checkpoint[] totalSupplyHistory;
// Flag that determines if the token is transferable or not.
bool public transfersEnabled;
// The factory used to create new clone tokens
TokenFactory public tokenFactory;
////////////////
// Constructor
////////////////
/**
* @notice Constructor to create a MiniMeToken
* @param _tokenFactory The address of the MiniMeTokenFactory contract that
* will create the Clone token contracts; the token factory needs to be
* deployed first
* @param _parentToken Address of the parent token, set to 0x0 if it is a
* new token
* @param _parentSnapShotBlock Block of the parent token that will
* determine the initial distribution of the clone token, set to 0 if it
* is a new token
* @param _tokenName Name of the new token
* @param _decimalUnits Number of decimals of the new token
* @param _tokenSymbol Token Symbol for the new token
* @param _transfersEnabled If true, tokens will be able to be transferred
*/
constructor(
address _tokenFactory,
address _parentToken,
uint _parentSnapShotBlock,
string memory _tokenName,
uint8 _decimalUnits,
string memory _tokenSymbol,
bool _transfersEnabled
)
public
{
tokenFactory = TokenFactory(_tokenFactory);
name = _tokenName; // Set the name
decimals = _decimalUnits; // Set the decimals
symbol = _tokenSymbol; // Set the symbol
parentToken = MiniMeToken(address(uint160(_parentToken)));
parentSnapShotBlock = _parentSnapShotBlock;
transfersEnabled = _transfersEnabled;
creationBlock = block.number;
}
///////////////////
// ERC20 Methods
///////////////////
/**
* @notice Send `_amount` tokens to `_to` from `msg.sender`
* @param _to The address of the recipient
* @param _amount The amount of tokens to be transferred
* @return Whether the transfer was successful or not
*/
function transfer(address _to, uint256 _amount) external returns (bool success) {
require(transfersEnabled);
return doTransfer(msg.sender, _to, _amount);
}
/**
* @notice Send `_amount` tokens to `_to` from `_from` on the condition it
* is approved by `_from`
* @param _from The address holding the tokens being transferred
* @param _to The address of the recipient
* @param _amount The amount of tokens to be transferred
* @return True if the transfer was successful
*/
function transferFrom(
address _from,
address _to,
uint256 _amount
)
external
returns (bool success)
{
// The controller of this contract can move tokens around at will,
// this is important to recognize! Confirm that you trust the
// controller of this contract, which in most situations should be
// another open source smart contract or 0x0
if (msg.sender != controller) {
require(transfersEnabled);
// The standard ERC 20 transferFrom functionality
if (allowed[_from][msg.sender] < _amount) {
return false;
}
allowed[_from][msg.sender] -= _amount;
}
return doTransfer(_from, _to, _amount);
}
/**
* @dev This is the actual transfer function in the token contract, it can
* only be called by other functions in this contract.
* @param _from The address holding the tokens being transferred
* @param _to The address of the recipient
* @param _amount The amount of tokens to be transferred
* @return True if the transfer was successful
*/
function doTransfer(
address _from,
address _to,
uint _amount
)
internal
returns(bool)
{
if (_amount == 0) {
return true;
}
require(parentSnapShotBlock < block.number);
// Do not allow transfer to 0x0 or the token contract itself
require((_to != address(0)) && (_to != address(this)));
// If the amount being transferred is more than the balance of the
// account, the transfer returns false
uint256 previousBalanceFrom = balanceOfAt(_from, block.number);
if (previousBalanceFrom < _amount) {
return false;
}
// Alerts the token controller of the transfer
if (isContract(controller)) {
require(TokenController(controller).onTransfer(_from, _to, _amount));
}
// First update the balance array with the new value for the address
// sending the tokens
updateValueAtNow(balances[_from], previousBalanceFrom - _amount);
// Then update the balance array with the new value for the address
// receiving the tokens
uint256 previousBalanceTo = balanceOfAt(_to, block.number);
require(previousBalanceTo + _amount >= previousBalanceTo); // Check for overflow
updateValueAtNow(balances[_to], previousBalanceTo + _amount);
// An event to make the transfer easy to find on the blockchain
emit Transfer(_from, _to, _amount);
return true;
}
function doApprove(
address _from,
address _spender,
uint256 _amount
)
internal
returns (bool)
{
require(transfersEnabled);
// To change the approve amount you first have to reduce the address's
// allowance to zero by calling `approve(_spender,0)` if it is not
// already 0 to mitigate the race condition described here:
// https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729
require((_amount == 0) || (allowed[_from][_spender] == 0));
// Alerts the token controller of the approve function call
if (isContract(controller)) {
require(TokenController(controller).onApprove(_from, _spender, _amount));
}
allowed[_from][_spender] = _amount;
emit Approval(_from, _spender, _amount);
return true;
}
/**
* @param _owner The address whose balance is being requested
* @return The balance of `_owner` at the current block
*/
function balanceOf(address _owner) external view returns (uint256 balance) {
return balanceOfAt(_owner, block.number);
}
/**
* @notice `msg.sender` approves `_spender` to spend `_amount` tokens on
* its behalf. This is a modified version of the ERC20 approve function
* to be a little bit safer
* @param _spender The address of the account able to transfer the tokens
* @param _amount The amount of tokens to be approved for transfer
* @return True if the approval was successful
*/
function approve(address _spender, uint256 _amount) external returns (bool success) {
return doApprove(msg.sender, _spender, _amount);
}
/**
* @dev This function makes it easy to read the `allowed[]` map
* @param _owner The address of the account that owns the token
* @param _spender The address of the account able to transfer the tokens
* @return Amount of remaining tokens of _owner that _spender is allowed
* to spend
*/
function allowance(
address _owner,
address _spender
)
external
view
returns (uint256 remaining)
{
return allowed[_owner][_spender];
}
/**
* @notice `msg.sender` approves `_spender` to send `_amount` tokens on
* its behalf, and then a function is triggered in the contract that is
* being approved, `_spender`. This allows users to use their tokens to
* interact with contracts in one function call instead of two
* @param _spender The address of the contract able to transfer the tokens
* @param _amount The amount of tokens to be approved for transfer
* @return True if the function call was successful
*/
function approveAndCall(
address _spender,
uint256 _amount,
bytes calldata _extraData
)
external
returns (bool success)
{
require(doApprove(msg.sender, _spender, _amount));
ApproveAndCallFallBack(_spender).receiveApproval(
msg.sender,
_amount,
address(this),
_extraData
);
return true;
}
/**
* @dev This function makes it easy to get the total number of tokens
* @return The total number of tokens
*/
function totalSupply() external view returns (uint) {
return totalSupplyAt(block.number);
}
////////////////
// Query balance and totalSupply in History
////////////////
/**
* @dev Queries the balance of `_owner` at a specific `_blockNumber`
* @param _owner The address from which the balance will be retrieved
* @param _blockNumber The block number when the balance is queried
* @return The balance at `_blockNumber`
*/
function balanceOfAt(
address _owner,
uint _blockNumber
)
public
view
returns (uint)
{
// These next few lines are used when the balance of the token is
// requested before a checkpoint was ever created for this token; it
// requires that `parentToken.balanceOfAt` be queried at the
// genesis block for that token, as this contains the initial balance of
// this token
if ((balances[_owner].length == 0) || (balances[_owner][0].fromBlock > _blockNumber)) {
if (address(parentToken) != address(0)) {
return parentToken.balanceOfAt(_owner, min(_blockNumber, parentSnapShotBlock));
} else {
// Has no parent
return 0;
}
// This will return the expected balance during normal situations
} else {
return getValueAt(balances[_owner], _blockNumber);
}
}
/**
* @notice Total amount of tokens at a specific `_blockNumber`.
* @param _blockNumber The block number when the totalSupply is queried
* @return The total amount of tokens at `_blockNumber`
*/
function totalSupplyAt(uint _blockNumber) public view returns(uint) {
// These next few lines are used when the totalSupply of the token is
// requested before a checkpoint was ever created for this token; it
// requires that `parentToken.totalSupplyAt` be queried at the
// genesis block for this token, as that contains the totalSupply of this
// token at this block number.
if ((totalSupplyHistory.length == 0) || (totalSupplyHistory[0].fromBlock > _blockNumber)) {
if (address(parentToken) != address(0)) {
return parentToken.totalSupplyAt(min(_blockNumber, parentSnapShotBlock));
} else {
return 0;
}
// This will return the expected totalSupply during normal situations
} else {
return getValueAt(totalSupplyHistory, _blockNumber);
}
}
////////////////
// Clone Token Method
////////////////
/**
* @notice Creates a new clone token with the initial distribution being
* this token at `snapshotBlock`
* @param _cloneTokenName Name of the clone token
* @param _cloneDecimalUnits Number of decimals of the smallest unit
* @param _cloneTokenSymbol Symbol of the clone token
* @param _snapshotBlock Block when the distribution of the parent token is
* copied to set the initial distribution of the new clone token;
* if the block is zero, the current block is used
* @param _transfersEnabled True if transfers are allowed in the clone
* @return The address of the new MiniMeToken Contract
*/
function createCloneToken(
string calldata _cloneTokenName,
uint8 _cloneDecimalUnits,
string calldata _cloneTokenSymbol,
uint _snapshotBlock,
bool _transfersEnabled
)
external
returns(address)
{
uint snapshotBlock = _snapshotBlock;
if (snapshotBlock == 0) {
snapshotBlock = block.number;
}
MiniMeToken cloneToken = MiniMeToken(
tokenFactory.createCloneToken(
address(this),
snapshotBlock,
_cloneTokenName,
_cloneDecimalUnits,
_cloneTokenSymbol,
_transfersEnabled
));
cloneToken.changeController(msg.sender);
// An event to make the token easy to find on the blockchain
emit NewCloneToken(address(cloneToken), snapshotBlock);
return address(cloneToken);
}
////////////////
// Generate and destroy tokens
////////////////
/**
* @notice Generates `_amount` tokens that are assigned to `_owner`
* @param _owner The address that will be assigned the new tokens
* @param _amount The quantity of tokens generated
* @return True if the tokens are generated correctly
*/
function generateTokens(
address _owner,
uint _amount
)
external
onlyController
returns (bool)
{
uint curTotalSupply = totalSupplyAt(block.number);
require(curTotalSupply + _amount >= curTotalSupply); // Check for overflow
uint previousBalanceTo = balanceOfAt(_owner, block.number);
require(previousBalanceTo + _amount >= previousBalanceTo); // Check for overflow
updateValueAtNow(totalSupplyHistory, curTotalSupply + _amount);
updateValueAtNow(balances[_owner], previousBalanceTo + _amount);
emit Transfer(address(0), _owner, _amount);
return true;
}
/**
* @notice Burns `_amount` tokens from `_owner`
* @param _owner The address that will lose the tokens
* @param _amount The quantity of tokens to burn
* @return True if the tokens are burned correctly
*/
function destroyTokens(
address _owner,
uint _amount
)
external
onlyController
returns (bool)
{
uint curTotalSupply = totalSupplyAt(block.number);
require(curTotalSupply >= _amount);
uint previousBalanceFrom = balanceOfAt(_owner, block.number);
require(previousBalanceFrom >= _amount);
updateValueAtNow(totalSupplyHistory, curTotalSupply - _amount);
updateValueAtNow(balances[_owner], previousBalanceFrom - _amount);
emit Transfer(_owner, address(0), _amount);
return true;
}
////////////////
// Enable tokens transfers
////////////////
/**
* @notice Enables token holders to transfer their tokens freely if true
* @param _transfersEnabled True if transfers are allowed in the clone
*/
function enableTransfers(bool _transfersEnabled) external onlyController {
transfersEnabled = _transfersEnabled;
}
////////////////
// Internal helper functions to query and set a value in a snapshot array
////////////////
/**
* @dev `getValueAt` retrieves the number of tokens at a given block number
* @param checkpoints The history of values being queried
* @param _block The block number to retrieve the value at
* @return The number of tokens being queried
*/
function getValueAt(
Checkpoint[] storage checkpoints,
uint _block
)
internal
view
returns (uint)
{
if (checkpoints.length == 0) {
return 0;
}
// Shortcut for the actual value
if (_block >= checkpoints[checkpoints.length-1].fromBlock) {
return checkpoints[checkpoints.length-1].value;
}
if (_block < checkpoints[0].fromBlock) {
return 0;
}
// Binary search of the value in the array
uint min = 0;
uint max = checkpoints.length-1;
while (max > min) {
uint mid = (max + min + 1) / 2;
if (checkpoints[mid].fromBlock<=_block) {
min = mid;
} else {
max = mid-1;
}
}
return checkpoints[min].value;
}
/**
* @dev `updateValueAtNow` used to update the `balances` map and the
* `totalSupplyHistory`
* @param checkpoints The history of data being updated
* @param _value The new number of tokens
*/
function updateValueAtNow(Checkpoint[] storage checkpoints, uint _value) internal {
if ((checkpoints.length == 0) || (checkpoints[checkpoints.length - 1].fromBlock < block.number)) {
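// checkpoints.length++ grows the storage array by one (allowed before
// Solidity 0.6) and indexes the newly added last slot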
Checkpoint storage newCheckPoint = checkpoints[checkpoints.length++];
newCheckPoint.fromBlock = uint128(block.number);
newCheckPoint.value = uint128(_value);
} else {
Checkpoint storage oldCheckPoint = checkpoints[checkpoints.length-1];
oldCheckPoint.value = uint128(_value);
}
}
/**
* @dev Internal function to determine if an address is a contract
* @param _addr The address being queried
* @return True if `_addr` is a contract
*/
function isContract(address _addr) internal returns(bool) {
uint size;
if (_addr == address(0)) {
return false;
}
assembly {
size := extcodesize(_addr)
}
return size>0;
}
/**
* @dev Helper function to return the minimum between two uints
*/
function min(uint a, uint b) internal pure returns (uint) {
return a < b ? a : b;
}
/**
* @notice The fallback function: If the contract's controller has not been
* set to 0, then the `proxyPayment` method is called which relays the
* ether and creates tokens as described in the token controller contract
*/
function () external payable {
require(isContract(controller));
require(TokenController(controller).proxyPayment.value(msg.value)(msg.sender));
}
//////////
// Safety Methods
//////////
/**
* @notice This method can be used by the controller to recover tokens
* mistakenly sent to this contract.
* @param _token The address of the token contract that you want to recover;
* set to 0 in case you want to extract ether.
*/
function claimTokens(address _token) external onlyController {
if (_token == address(0)) {
controller.transfer(address(this).balance);
return;
}
MiniMeToken token = MiniMeToken(address(uint160(_token)));
uint balance = token.balanceOf(address(this));
token.transfer(controller, balance);
emit ClaimedTokens(_token, controller, balance);
}
////////////////
// Events
////////////////
event ClaimedTokens(address indexed _token, address indexed _controller, uint _amount);
event Transfer(address indexed _from, address indexed _to, uint256 _amount);
event NewCloneToken(address indexed _cloneToken, uint snapshotBlock);
event Approval(
address indexed _owner,
address indexed _spender,
uint256 _amount
);
}

View File

@ -0,0 +1,48 @@
pragma solidity ^0.5.2;
import "./TokenFactory.sol";
import "./MiniMeToken.sol";
/**
* @dev This contract is used to generate clone contracts from a contract.
* In Solidity this is the way to create a contract from a contract of the
* same class
*/
contract MiniMeTokenFactory is TokenFactory {
/**
* @notice Update the DApp by creating a new token with new functionalities;
* the msg.sender becomes the controller of this clone token
* @param _parentToken Address of the token being cloned
* @param _snapshotBlock Block of the parent token that will
* determine the initial distribution of the clone token
* @param _tokenName Name of the new token
* @param _decimalUnits Number of decimals of the new token
* @param _tokenSymbol Token Symbol for the new token
* @param _transfersEnabled If true, tokens will be able to be transferred
* @return The address of the new token contract
*/
function createCloneToken(
address _parentToken,
uint _snapshotBlock,
string calldata _tokenName,
uint8 _decimalUnits,
string calldata _tokenSymbol,
bool _transfersEnabled
) external returns (address payable)
{
MiniMeToken newToken = new MiniMeToken(
address(this),
_parentToken,
_snapshotBlock,
_tokenName,
_decimalUnits,
_tokenSymbol,
_transfersEnabled
);
newToken.changeController(msg.sender);
return address(newToken);
}
}

View File

@ -0,0 +1,108 @@
pragma solidity ^0.5.2;
import "./ERC20Token.sol";
contract MiniMeTokenInterface is ERC20Token {
/**
* @notice `msg.sender` approves `_spender` to send `_amount` tokens on
* its behalf, and then a function is triggered in the contract that is
* being approved, `_spender`. This allows users to use their tokens to
* interact with contracts in one function call instead of two
* @param _spender The address of the contract able to transfer the tokens
* @param _amount The amount of tokens to be approved for transfer
* @return True if the function call was successful
*/
function approveAndCall(
address _spender,
uint256 _amount,
bytes calldata _extraData
)
external
returns (bool success);
/**
* @notice Creates a new clone token with the initial distribution being
* this token at `_snapshotBlock`
* @param _cloneTokenName Name of the clone token
* @param _cloneDecimalUnits Number of decimals of the smallest unit
* @param _cloneTokenSymbol Symbol of the clone token
* @param _snapshotBlock Block when the distribution of the parent token is
* copied to set the initial distribution of the new clone token;
* if the block is zero, the current block is used
* @param _transfersEnabled True if transfers are allowed in the clone
* @return The address of the new MiniMeToken Contract
*/
function createCloneToken(
string calldata _cloneTokenName,
uint8 _cloneDecimalUnits,
string calldata _cloneTokenSymbol,
uint _snapshotBlock,
bool _transfersEnabled
)
external
returns(address);
/**
* @notice Generates `_amount` tokens that are assigned to `_owner`
* @param _owner The address that will be assigned the new tokens
* @param _amount The quantity of tokens generated
* @return True if the tokens are generated correctly
*/
function generateTokens(
address _owner,
uint _amount
)
external
returns (bool);
/**
* @notice Burns `_amount` tokens from `_owner`
* @param _owner The address that will lose the tokens
* @param _amount The quantity of tokens to burn
* @return True if the tokens are burned correctly
*/
function destroyTokens(
address _owner,
uint _amount
)
external
returns (bool);
/**
* @notice Enables token holders to transfer their tokens freely if true
* @param _transfersEnabled True if transfers are allowed in the clone
*/
function enableTransfers(bool _transfersEnabled) external;
/**
* @notice This method can be used by the controller to recover tokens
* mistakenly sent to this contract.
* @param _token The address of the token contract that you want to recover;
* set to 0 in case you want to extract ether.
*/
function claimTokens(address _token) external;
/**
* @dev Queries the balance of `_owner` at a specific `_blockNumber`
* @param _owner The address from which the balance will be retrieved
* @param _blockNumber The block number when the balance is queried
* @return The balance at `_blockNumber`
*/
function balanceOfAt(
address _owner,
uint _blockNumber
)
public
view
returns (uint);
/**
* @notice Total amount of tokens at a specific `_blockNumber`.
* @param _blockNumber The block number when the totalSupply is queried
* @return The total amount of tokens at `_blockNumber`
*/
function totalSupplyAt(uint _blockNumber) public view returns(uint);
}

View File

@ -0,0 +1,35 @@
pragma solidity ^0.5.2;
/**
* @dev The token controller contract must implement these functions
*/
interface TokenController {
/**
* @notice Called when `_owner` sends ether to the MiniMe Token contract
* @param _owner The address that sent the ether to create tokens
* @return True if the ether is accepted, false if it throws
*/
function proxyPayment(address _owner) external payable returns(bool);
/**
* @notice Notifies the controller about a token transfer allowing the
* controller to react if desired
* @param _from The origin of the transfer
* @param _to The destination of the transfer
* @param _amount The amount of the transfer
* @return False if the controller does not authorize the transfer
*/
function onTransfer(address _from, address _to, uint _amount) external returns(bool);
/**
* @notice Notifies the controller about an approval allowing the
* controller to react if desired
* @param _owner The address that calls `approve()`
* @param _spender The spender in the `approve()` call
* @param _amount The amount in the `approve()` call
* @return False if the controller does not authorize the approval
*/
function onApprove(address _owner, address _spender, uint _amount) external
returns(bool);
}

View File

@ -0,0 +1,13 @@
pragma solidity ^0.5.2;
contract TokenFactory {
function createCloneToken(
address _parentToken,
uint _snapshotBlock,
string calldata _tokenName,
uint8 _decimalUnits,
string calldata _tokenSymbol,
bool _transfersEnabled
) external returns (address payable);
}

View File

@ -0,0 +1,506 @@
pragma solidity ^0.5.2;
import "./SafeMath.sol";
contract BancorFormula {
using SafeMath for uint256;
uint256 private constant ONE = 1;
uint8 private constant MIN_PRECISION = 32;
uint8 private constant MAX_PRECISION = 127;
/**
Auto-generated via 'PrintIntScalingFactors.py'
*/
uint256 private constant FIXED_1 = 0x080000000000000000000000000000000;
uint256 private constant FIXED_2 = 0x100000000000000000000000000000000;
uint256 private constant MAX_NUM = 0x200000000000000000000000000000000;
/**
Auto-generated via 'PrintLn2ScalingFactors.py'
*/
uint256 private constant LN2_NUMERATOR = 0x3f80fe03f80fe03f80fe03f80fe03f8;
uint256 private constant LN2_DENOMINATOR = 0x5b9de1d10bf4103d647b0955897ba80;
/**
Auto-generated via 'PrintFunctionOptimalLog.py' and 'PrintFunctionOptimalExp.py'
*/
uint256 private constant OPT_LOG_MAX_VAL = 0x15bf0a8b1457695355fb8ac404e7a79e3;
uint256 private constant OPT_EXP_MAX_VAL = 0x800000000000000000000000000000000;
/**
Auto-generated via 'PrintFunctionConstructor.py'
*/
uint256[128] private maxExpArray;
constructor() public {
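// Entries below MIN_PRECISION (32) are never consulted by
// findPositionInMaxExpArray, so they are left commented out (saving deployment gas)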
// maxExpArray[0] = 0x6bffffffffffffffffffffffffffffffff;
// maxExpArray[1] = 0x67ffffffffffffffffffffffffffffffff;
// maxExpArray[2] = 0x637fffffffffffffffffffffffffffffff;
// maxExpArray[3] = 0x5f6fffffffffffffffffffffffffffffff;
// maxExpArray[4] = 0x5b77ffffffffffffffffffffffffffffff;
// maxExpArray[5] = 0x57b3ffffffffffffffffffffffffffffff;
// maxExpArray[6] = 0x5419ffffffffffffffffffffffffffffff;
// maxExpArray[7] = 0x50a2ffffffffffffffffffffffffffffff;
// maxExpArray[8] = 0x4d517fffffffffffffffffffffffffffff;
// maxExpArray[9] = 0x4a233fffffffffffffffffffffffffffff;
// maxExpArray[10] = 0x47165fffffffffffffffffffffffffffff;
// maxExpArray[11] = 0x4429afffffffffffffffffffffffffffff;
// maxExpArray[12] = 0x415bc7ffffffffffffffffffffffffffff;
// maxExpArray[13] = 0x3eab73ffffffffffffffffffffffffffff;
// maxExpArray[14] = 0x3c1771ffffffffffffffffffffffffffff;
// maxExpArray[15] = 0x399e96ffffffffffffffffffffffffffff;
// maxExpArray[16] = 0x373fc47fffffffffffffffffffffffffff;
// maxExpArray[17] = 0x34f9e8ffffffffffffffffffffffffffff;
// maxExpArray[18] = 0x32cbfd5fffffffffffffffffffffffffff;
// maxExpArray[19] = 0x30b5057fffffffffffffffffffffffffff;
// maxExpArray[20] = 0x2eb40f9fffffffffffffffffffffffffff;
// maxExpArray[21] = 0x2cc8340fffffffffffffffffffffffffff;
// maxExpArray[22] = 0x2af09481ffffffffffffffffffffffffff;
// maxExpArray[23] = 0x292c5bddffffffffffffffffffffffffff;
// maxExpArray[24] = 0x277abdcdffffffffffffffffffffffffff;
// maxExpArray[25] = 0x25daf6657fffffffffffffffffffffffff;
// maxExpArray[26] = 0x244c49c65fffffffffffffffffffffffff;
// maxExpArray[27] = 0x22ce03cd5fffffffffffffffffffffffff;
// maxExpArray[28] = 0x215f77c047ffffffffffffffffffffffff;
// maxExpArray[29] = 0x1fffffffffffffffffffffffffffffffff;
// maxExpArray[30] = 0x1eaefdbdabffffffffffffffffffffffff;
// maxExpArray[31] = 0x1d6bd8b2ebffffffffffffffffffffffff;
maxExpArray[32] = 0x1c35fedd14ffffffffffffffffffffffff;
maxExpArray[33] = 0x1b0ce43b323fffffffffffffffffffffff;
maxExpArray[34] = 0x19f0028ec1ffffffffffffffffffffffff;
maxExpArray[35] = 0x18ded91f0e7fffffffffffffffffffffff;
maxExpArray[36] = 0x17d8ec7f0417ffffffffffffffffffffff;
maxExpArray[37] = 0x16ddc6556cdbffffffffffffffffffffff;
maxExpArray[38] = 0x15ecf52776a1ffffffffffffffffffffff;
maxExpArray[39] = 0x15060c256cb2ffffffffffffffffffffff;
maxExpArray[40] = 0x1428a2f98d72ffffffffffffffffffffff;
maxExpArray[41] = 0x13545598e5c23fffffffffffffffffffff;
maxExpArray[42] = 0x1288c4161ce1dfffffffffffffffffffff;
maxExpArray[43] = 0x11c592761c666fffffffffffffffffffff;
maxExpArray[44] = 0x110a688680a757ffffffffffffffffffff;
maxExpArray[45] = 0x1056f1b5bedf77ffffffffffffffffffff;
maxExpArray[46] = 0x0faadceceeff8bffffffffffffffffffff;
maxExpArray[47] = 0x0f05dc6b27edadffffffffffffffffffff;
maxExpArray[48] = 0x0e67a5a25da4107fffffffffffffffffff;
maxExpArray[49] = 0x0dcff115b14eedffffffffffffffffffff;
maxExpArray[50] = 0x0d3e7a392431239fffffffffffffffffff;
maxExpArray[51] = 0x0cb2ff529eb71e4fffffffffffffffffff;
maxExpArray[52] = 0x0c2d415c3db974afffffffffffffffffff;
maxExpArray[53] = 0x0bad03e7d883f69bffffffffffffffffff;
maxExpArray[54] = 0x0b320d03b2c343d5ffffffffffffffffff;
maxExpArray[55] = 0x0abc25204e02828dffffffffffffffffff;
maxExpArray[56] = 0x0a4b16f74ee4bb207fffffffffffffffff;
maxExpArray[57] = 0x09deaf736ac1f569ffffffffffffffffff;
maxExpArray[58] = 0x0976bd9952c7aa957fffffffffffffffff;
maxExpArray[59] = 0x09131271922eaa606fffffffffffffffff;
maxExpArray[60] = 0x08b380f3558668c46fffffffffffffffff;
maxExpArray[61] = 0x0857ddf0117efa215bffffffffffffffff;
maxExpArray[62] = 0x07ffffffffffffffffffffffffffffffff;
maxExpArray[63] = 0x07abbf6f6abb9d087fffffffffffffffff;
maxExpArray[64] = 0x075af62cbac95f7dfa7fffffffffffffff;
maxExpArray[65] = 0x070d7fb7452e187ac13fffffffffffffff;
maxExpArray[66] = 0x06c3390ecc8af379295fffffffffffffff;
maxExpArray[67] = 0x067c00a3b07ffc01fd6fffffffffffffff;
maxExpArray[68] = 0x0637b647c39cbb9d3d27ffffffffffffff;
maxExpArray[69] = 0x05f63b1fc104dbd39587ffffffffffffff;
maxExpArray[70] = 0x05b771955b36e12f7235ffffffffffffff;
maxExpArray[71] = 0x057b3d49dda84556d6f6ffffffffffffff;
maxExpArray[72] = 0x054183095b2c8ececf30ffffffffffffff;
maxExpArray[73] = 0x050a28be635ca2b888f77fffffffffffff;
maxExpArray[74] = 0x04d5156639708c9db33c3fffffffffffff;
maxExpArray[75] = 0x04a23105873875bd52dfdfffffffffffff;
maxExpArray[76] = 0x0471649d87199aa990756fffffffffffff;
maxExpArray[77] = 0x04429a21a029d4c1457cfbffffffffffff;
maxExpArray[78] = 0x0415bc6d6fb7dd71af2cb3ffffffffffff;
maxExpArray[79] = 0x03eab73b3bbfe282243ce1ffffffffffff;
maxExpArray[80] = 0x03c1771ac9fb6b4c18e229ffffffffffff;
maxExpArray[81] = 0x0399e96897690418f785257fffffffffff;
maxExpArray[82] = 0x0373fc456c53bb779bf0ea9fffffffffff;
maxExpArray[83] = 0x034f9e8e490c48e67e6ab8bfffffffffff;
maxExpArray[84] = 0x032cbfd4a7adc790560b3337ffffffffff;
maxExpArray[85] = 0x030b50570f6e5d2acca94613ffffffffff;
maxExpArray[86] = 0x02eb40f9f620fda6b56c2861ffffffffff;
maxExpArray[87] = 0x02cc8340ecb0d0f520a6af58ffffffffff;
maxExpArray[88] = 0x02af09481380a0a35cf1ba02ffffffffff;
maxExpArray[89] = 0x0292c5bdd3b92ec810287b1b3fffffffff;
maxExpArray[90] = 0x0277abdcdab07d5a77ac6d6b9fffffffff;
maxExpArray[91] = 0x025daf6654b1eaa55fd64df5efffffffff;
maxExpArray[92] = 0x0244c49c648baa98192dce88b7ffffffff;
maxExpArray[93] = 0x022ce03cd5619a311b2471268bffffffff;
maxExpArray[94] = 0x0215f77c045fbe885654a44a0fffffffff;
maxExpArray[95] = 0x01ffffffffffffffffffffffffffffffff;
maxExpArray[96] = 0x01eaefdbdaaee7421fc4d3ede5ffffffff;
maxExpArray[97] = 0x01d6bd8b2eb257df7e8ca57b09bfffffff;
maxExpArray[98] = 0x01c35fedd14b861eb0443f7f133fffffff;
maxExpArray[99] = 0x01b0ce43b322bcde4a56e8ada5afffffff;
maxExpArray[100] = 0x019f0028ec1fff007f5a195a39dfffffff;
maxExpArray[101] = 0x018ded91f0e72ee74f49b15ba527ffffff;
maxExpArray[102] = 0x017d8ec7f04136f4e5615fd41a63ffffff;
maxExpArray[103] = 0x016ddc6556cdb84bdc8d12d22e6fffffff;
maxExpArray[104] = 0x015ecf52776a1155b5bd8395814f7fffff;
maxExpArray[105] = 0x015060c256cb23b3b3cc3754cf40ffffff;
maxExpArray[106] = 0x01428a2f98d728ae223ddab715be3fffff;
maxExpArray[107] = 0x013545598e5c23276ccf0ede68034fffff;
maxExpArray[108] = 0x01288c4161ce1d6f54b7f61081194fffff;
maxExpArray[109] = 0x011c592761c666aa641d5a01a40f17ffff;
maxExpArray[110] = 0x0110a688680a7530515f3e6e6cfdcdffff;
maxExpArray[111] = 0x01056f1b5bedf75c6bcb2ce8aed428ffff;
maxExpArray[112] = 0x00faadceceeff8a0890f3875f008277fff;
maxExpArray[113] = 0x00f05dc6b27edad306388a600f6ba0bfff;
maxExpArray[114] = 0x00e67a5a25da41063de1495d5b18cdbfff;
maxExpArray[115] = 0x00dcff115b14eedde6fc3aa5353f2e4fff;
maxExpArray[116] = 0x00d3e7a3924312399f9aae2e0f868f8fff;
maxExpArray[117] = 0x00cb2ff529eb71e41582cccd5a1ee26fff;
maxExpArray[118] = 0x00c2d415c3db974ab32a51840c0b67edff;
maxExpArray[119] = 0x00bad03e7d883f69ad5b0a186184e06bff;
maxExpArray[120] = 0x00b320d03b2c343d4829abd6075f0cc5ff;
maxExpArray[121] = 0x00abc25204e02828d73c6e80bcdb1a95bf;
maxExpArray[122] = 0x00a4b16f74ee4bb2040a1ec6c15fbbf2df;
maxExpArray[123] = 0x009deaf736ac1f569deb1b5ae3f36c130f;
maxExpArray[124] = 0x00976bd9952c7aa957f5937d790ef65037;
maxExpArray[125] = 0x009131271922eaa6064b73a22d0bd4f2bf;
maxExpArray[126] = 0x008b380f3558668c46c91c49a2f8e967b9;
maxExpArray[127] = 0x00857ddf0117efa215952912839f6473e6;
}
/**
General Description:
Determine a value of precision.
Calculate an integer approximation of (_baseN / _baseD) ^ (_expN / _expD) * 2 ^ precision.
Return the result along with the precision used.
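Equivalently, if this function returns the pair (result, precision), then
(_baseN / _baseD) ^ (_expN / _expD) is approximately result / 2 ^ precision.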
Detailed Description:
Instead of calculating "base ^ exp", we calculate "e ^ (log(base) * exp)".
The value of "log(base)" is represented with an integer slightly smaller than "log(base) * 2 ^ precision".
The larger "precision" is, the more accurately this value represents the real value.
However, the larger "precision" is, the more bits are required in order to store this value.
And the exponentiation function, which takes "x" and calculates "e ^ x", is limited to a maximum exponent (maximum value of "x").
This maximum exponent depends on the "precision" used, and it is given by "maxExpArray[precision] >> (MAX_PRECISION - precision)".
Hence we need to determine the highest precision which can be used for the given input, before calling the exponentiation function.
This allows us to compute "base ^ exp" with maximum accuracy and without exceeding 256 bits in any of the intermediate computations.
This functions assumes that "_expN < 2 ^ 256 / log(MAX_NUM - 1)", otherwise the multiplication should be replaced with a "safeMul".
*/
function power(
uint256 _baseN,
uint256 _baseD,
uint32 _expN,
uint32 _expD) internal view returns (uint256, uint8)
{
require(_baseN < MAX_NUM, "SNT available is invalid");
uint256 baseLog;
uint256 base = _baseN * FIXED_1 / _baseD;
if (base < OPT_LOG_MAX_VAL) {
baseLog = optimalLog(base);
} else {
baseLog = generalLog(base);
}
uint256 baseLogTimesExp = baseLog * _expN / _expD;
if (baseLogTimesExp < OPT_EXP_MAX_VAL) {
return (optimalExp(baseLogTimesExp), MAX_PRECISION);
} else {
uint8 precision = findPositionInMaxExpArray(baseLogTimesExp);
return (generalExp(baseLogTimesExp >> (MAX_PRECISION - precision), precision), precision);
}
}
/**
Compute log(x / FIXED_1) * FIXED_1.
This functions assumes that "x >= FIXED_1", because the output would be negative otherwise.
*/
function generalLog(uint256 x) internal pure returns (uint256) {
uint256 res = 0;
// If x >= 2, then we compute the integer part of log2(x), which is larger than 0.
if (x >= FIXED_2) {
uint8 count = floorLog2(x / FIXED_1);
x >>= count; // now x < 2
res = count * FIXED_1;
}
// If x > 1, then we compute the fraction part of log2(x), which is larger than 0.
if (x > FIXED_1) {
for (uint8 i = MAX_PRECISION; i > 0; --i) {
x = (x * x) / FIXED_1; // now 1 < x < 4
if (x >= FIXED_2) {
x >>= 1; // now 1 < x < 2
res += ONE << (i - 1);
}
}
}
return res * LN2_NUMERATOR / LN2_DENOMINATOR;
}
/**
Compute the largest integer smaller than or equal to the binary logarithm of the input.
*/
function floorLog2(uint256 _n) internal pure returns (uint8) {
uint8 res = 0;
if (_n < 256) {
// At most 8 iterations
while (_n > 1) {
_n >>= 1;
res += 1;
}
} else {
// Exactly 8 iterations
for (uint8 s = 128; s > 0; s >>= 1) {
if (_n >= (ONE << s)) {
_n >>= s;
res |= s;
}
}
}
return res;
}
/**
The global "maxExpArray" is sorted in descending order, and therefore the following statements are equivalent:
- This function finds the position of [the smallest value in "maxExpArray" larger than or equal to "x"]
- This function finds the highest position of [a value in "maxExpArray" larger than or equal to "x"]
*/
function findPositionInMaxExpArray(uint256 _x) internal view returns (uint8) {
uint8 lo = MIN_PRECISION;
uint8 hi = MAX_PRECISION;
while (lo + 1 < hi) {
uint8 mid = (lo + hi) / 2;
if (maxExpArray[mid] >= _x) {
lo = mid;
} else {
hi = mid;
}
}
if (maxExpArray[hi] >= _x)
return hi;
if (maxExpArray[lo] >= _x)
return lo;
require(false, "Could not find a suitable position");
return 0;
}
/**
This function can be auto-generated by the script 'PrintFunctionGeneralExp.py'.
It approximates "e ^ x" via maclaurin summation: "(x^0)/0! + (x^1)/1! + ... + (x^n)/n!".
It returns "e ^ (x / 2 ^ precision) * 2 ^ precision", that is, the result is upshifted for accuracy.
The global "maxExpArray" maps each "precision" to "((maximumExponent + 1) << (MAX_PRECISION - precision)) - 1".
The maximum permitted value for "x" is therefore given by "maxExpArray[precision] >> (MAX_PRECISION - precision)".
*/
function generalExp(uint256 _x, uint8 _precision) internal pure returns (uint256) {
uint256 xi = _x;
uint256 res = 0;
xi = (xi * _x) >> _precision;
res += xi * 0x3442c4e6074a82f1797f72ac0000000; // add x^02 * (33! / 02!)
xi = (xi * _x) >> _precision;
res += xi * 0x116b96f757c380fb287fd0e40000000; // add x^03 * (33! / 03!)
xi = (xi * _x) >> _precision;
res += xi * 0x045ae5bdd5f0e03eca1ff4390000000; // add x^04 * (33! / 04!)
xi = (xi * _x) >> _precision;
res += xi * 0x00defabf91302cd95b9ffda50000000; // add x^05 * (33! / 05!)
xi = (xi * _x) >> _precision;
res += xi * 0x002529ca9832b22439efff9b8000000; // add x^06 * (33! / 06!)
xi = (xi * _x) >> _precision;
res += xi * 0x00054f1cf12bd04e516b6da88000000; // add x^07 * (33! / 07!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000a9e39e257a09ca2d6db51000000; // add x^08 * (33! / 08!)
xi = (xi * _x) >> _precision;
res += xi * 0x000012e066e7b839fa050c309000000; // add x^09 * (33! / 09!)
xi = (xi * _x) >> _precision;
res += xi * 0x000001e33d7d926c329a1ad1a800000; // add x^10 * (33! / 10!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000002bee513bdb4a6b19b5f800000; // add x^11 * (33! / 11!)
xi = (xi * _x) >> _precision;
res += xi * 0x00000003a9316fa79b88eccf2a00000; // add x^12 * (33! / 12!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000000048177ebe1fa812375200000; // add x^13 * (33! / 13!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000000005263fe90242dcbacf00000; // add x^14 * (33! / 14!)
xi = (xi * _x) >> _precision;
res += xi * 0x000000000057e22099c030d94100000; // add x^15 * (33! / 15!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000000000057e22099c030d9410000; // add x^16 * (33! / 16!)
xi = (xi * _x) >> _precision;
res += xi * 0x00000000000052b6b54569976310000; // add x^17 * (33! / 17!)
xi = (xi * _x) >> _precision;
res += xi * 0x00000000000004985f67696bf748000; // add x^18 * (33! / 18!)
xi = (xi * _x) >> _precision;
res += xi * 0x000000000000003dea12ea99e498000; // add x^19 * (33! / 19!)
xi = (xi * _x) >> _precision;
res += xi * 0x00000000000000031880f2214b6e000; // add x^20 * (33! / 20!)
xi = (xi * _x) >> _precision;
res += xi * 0x000000000000000025bcff56eb36000; // add x^21 * (33! / 21!)
xi = (xi * _x) >> _precision;
res += xi * 0x000000000000000001b722e10ab1000; // add x^22 * (33! / 22!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000000000000000001317c70077000; // add x^23 * (33! / 23!)
xi = (xi * _x) >> _precision;
res += xi * 0x00000000000000000000cba84aafa00; // add x^24 * (33! / 24!)
xi = (xi * _x) >> _precision;
res += xi * 0x00000000000000000000082573a0a00; // add x^25 * (33! / 25!)
xi = (xi * _x) >> _precision;
res += xi * 0x00000000000000000000005035ad900; // add x^26 * (33! / 26!)
xi = (xi * _x) >> _precision;
res += xi * 0x000000000000000000000002f881b00; // add x^27 * (33! / 27!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000000000000000000000001b29340; // add x^28 * (33! / 28!)
xi = (xi * _x) >> _precision;
res += xi * 0x00000000000000000000000000efc40; // add x^29 * (33! / 29!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000000000000000000000000007fe0; // add x^30 * (33! / 30!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000000000000000000000000000420; // add x^31 * (33! / 31!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000000000000000000000000000021; // add x^32 * (33! / 32!)
xi = (xi * _x) >> _precision;
res += xi * 0x0000000000000000000000000000001; // add x^33 * (33! / 33!)
return res / 0x688589cc0e9505e2f2fee5580000000 + _x + (ONE << _precision); // divide by 33! and then add x^1 / 1! + x^0 / 0!
}
/**
Return log(x / FIXED_1) * FIXED_1
Input range: FIXED_1 <= x <= OPT_LOG_MAX_VAL - 1
Auto-generated via 'PrintFunctionOptimalLog.py'
Detailed description:
- Rewrite the input as a product of natural exponents and a single residual r, such that 1 < r < 2
- The natural logarithm of each (pre-calculated) exponent is the degree of the exponent
- The natural logarithm of r is calculated via Taylor series for log(1 + x), where x = r - 1
- The natural logarithm of the input is calculated by summing up the intermediate results above
- For example: log(250) = log(e^4 * e^1 * e^0.5 * 1.021692859) = 4 + 1 + 0.5 + log(1 + 0.021692859)
*/
function optimalLog(uint256 x) internal pure returns (uint256) {
uint256 res = 0;
uint256 y = 0;
uint256 z;
uint256 w;
if (x >= 0xd3094c70f034de4b96ff7d5b6f99fcd8) {
res += 0x40000000000000000000000000000000;
x = x * FIXED_1 / 0xd3094c70f034de4b96ff7d5b6f99fcd8;} // add 1 / 2^1
if (x >= 0xa45af1e1f40c333b3de1db4dd55f29a7) {
res += 0x20000000000000000000000000000000;
x = x * FIXED_1 / 0xa45af1e1f40c333b3de1db4dd55f29a7;} // add 1 / 2^2
if (x >= 0x910b022db7ae67ce76b441c27035c6a1) {
res += 0x10000000000000000000000000000000;
x = x * FIXED_1 / 0x910b022db7ae67ce76b441c27035c6a1;} // add 1 / 2^3
if (x >= 0x88415abbe9a76bead8d00cf112e4d4a8) {
res += 0x08000000000000000000000000000000;
x = x * FIXED_1 / 0x88415abbe9a76bead8d00cf112e4d4a8;} // add 1 / 2^4
if (x >= 0x84102b00893f64c705e841d5d4064bd3) {
res += 0x04000000000000000000000000000000;
x = x * FIXED_1 / 0x84102b00893f64c705e841d5d4064bd3;} // add 1 / 2^5
if (x >= 0x8204055aaef1c8bd5c3259f4822735a2) {
res += 0x02000000000000000000000000000000;
x = x * FIXED_1 / 0x8204055aaef1c8bd5c3259f4822735a2;} // add 1 / 2^6
if (x >= 0x810100ab00222d861931c15e39b44e99) {
res += 0x01000000000000000000000000000000;
x = x * FIXED_1 / 0x810100ab00222d861931c15e39b44e99;} // add 1 / 2^7
if (x >= 0x808040155aabbbe9451521693554f733) {
res += 0x00800000000000000000000000000000;
x = x * FIXED_1 / 0x808040155aabbbe9451521693554f733;} // add 1 / 2^8
z = y = x - FIXED_1;
w = y * y / FIXED_1;
res += z * (0x100000000000000000000000000000000 - y) / 0x100000000000000000000000000000000;
z = z * w / FIXED_1; // add y^01 / 01 - y^02 / 02
res += z * (0x0aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa - y) / 0x200000000000000000000000000000000;
z = z * w / FIXED_1; // add y^03 / 03 - y^04 / 04
res += z * (0x099999999999999999999999999999999 - y) / 0x300000000000000000000000000000000;
z = z * w / FIXED_1; // add y^05 / 05 - y^06 / 06
res += z * (0x092492492492492492492492492492492 - y) / 0x400000000000000000000000000000000;
z = z * w / FIXED_1; // add y^07 / 07 - y^08 / 08
res += z * (0x08e38e38e38e38e38e38e38e38e38e38e - y) / 0x500000000000000000000000000000000;
z = z * w / FIXED_1; // add y^09 / 09 - y^10 / 10
res += z * (0x08ba2e8ba2e8ba2e8ba2e8ba2e8ba2e8b - y) / 0x600000000000000000000000000000000;
z = z * w / FIXED_1; // add y^11 / 11 - y^12 / 12
res += z * (0x089d89d89d89d89d89d89d89d89d89d89 - y) / 0x700000000000000000000000000000000;
z = z * w / FIXED_1; // add y^13 / 13 - y^14 / 14
res += z * (0x088888888888888888888888888888888 - y) / 0x800000000000000000000000000000000;
// add y^15 / 15 - y^16 / 16
return res;
}
/**
Return e ^ (x / FIXED_1) * FIXED_1
Input range: 0 <= x <= OPT_EXP_MAX_VAL - 1
Auto-generated via 'PrintFunctionOptimalExp.py'
Detailed description:
- Rewrite the input as a sum of binary exponents and a single residual r, as small as possible
- The exponentiation of each binary exponent is given (pre-calculated)
- The exponentiation of r is calculated via Taylor series for e^x, where x = r
- The exponentiation of the input is calculated by multiplying the intermediate results above
- For example: e^5.521692859 = e^(4 + 1 + 0.5 + 0.021692859) = e^4 * e^1 * e^0.5 * e^0.021692859
*/
function optimalExp(uint256 x) internal pure returns (uint256) {
uint256 res = 0;
uint256 y = 0;
uint256 z;
z = y = x % 0x10000000000000000000000000000000; // get the input modulo 2^(-3)
z = z * y / FIXED_1;
res += z * 0x10e1b3be415a0000; // add y^02 * (20! / 02!)
z = z * y / FIXED_1;
res += z * 0x05a0913f6b1e0000; // add y^03 * (20! / 03!)
z = z * y / FIXED_1;
res += z * 0x0168244fdac78000; // add y^04 * (20! / 04!)
z = z * y / FIXED_1;
res += z * 0x004807432bc18000; // add y^05 * (20! / 05!)
z = z * y / FIXED_1;
res += z * 0x000c0135dca04000; // add y^06 * (20! / 06!)
z = z * y / FIXED_1;
res += z * 0x0001b707b1cdc000; // add y^07 * (20! / 07!)
z = z * y / FIXED_1;
res += z * 0x000036e0f639b800; // add y^08 * (20! / 08!)
z = z * y / FIXED_1;
res += z * 0x00000618fee9f800; // add y^09 * (20! / 09!)
z = z * y / FIXED_1;
res += z * 0x0000009c197dcc00; // add y^10 * (20! / 10!)
z = z * y / FIXED_1;
res += z * 0x0000000e30dce400; // add y^11 * (20! / 11!)
z = z * y / FIXED_1;
res += z * 0x000000012ebd1300; // add y^12 * (20! / 12!)
z = z * y / FIXED_1;
res += z * 0x0000000017499f00; // add y^13 * (20! / 13!)
z = z * y / FIXED_1;
res += z * 0x0000000001a9d480; // add y^14 * (20! / 14!)
z = z * y / FIXED_1;
res += z * 0x00000000001c6380; // add y^15 * (20! / 15!)
z = z * y / FIXED_1;
res += z * 0x000000000001c638; // add y^16 * (20! / 16!)
z = z * y / FIXED_1;
res += z * 0x0000000000001ab8; // add y^17 * (20! / 17!)
z = z * y / FIXED_1;
res += z * 0x000000000000017c; // add y^18 * (20! / 18!)
z = z * y / FIXED_1;
res += z * 0x0000000000000014; // add y^19 * (20! / 19!)
z = z * y / FIXED_1;
res += z * 0x0000000000000001; // add y^20 * (20! / 20!)
res = res / 0x21c3677c82b40000 + y + FIXED_1; // divide by 20! and then add y^1 / 1! + y^0 / 0!
if ((x & 0x010000000000000000000000000000000) != 0)
res = res * 0x1c3d6a24ed82218787d624d3e5eba95f9 / 0x18ebef9eac820ae8682b9793ac6d1e776; // multiply by e^2^(-3)
if ((x & 0x020000000000000000000000000000000) != 0)
res = res * 0x18ebef9eac820ae8682b9793ac6d1e778 / 0x1368b2fc6f9609fe7aceb46aa619baed4; // multiply by e^2^(-2)
if ((x & 0x040000000000000000000000000000000) != 0)
res = res * 0x1368b2fc6f9609fe7aceb46aa619baed5 / 0x0bc5ab1b16779be3575bd8f0520a9f21f; // multiply by e^2^(-1)
if ((x & 0x080000000000000000000000000000000) != 0)
res = res * 0x0bc5ab1b16779be3575bd8f0520a9f21e / 0x0454aaa8efe072e7f6ddbab84b40a55c9; // multiply by e^2^(+0)
if ((x & 0x100000000000000000000000000000000) != 0)
res = res * 0x0454aaa8efe072e7f6ddbab84b40a55c5 / 0x00960aadc109e7a3bf4578099615711ea; // multiply by e^2^(+1)
if ((x & 0x200000000000000000000000000000000) != 0)
res = res * 0x00960aadc109e7a3bf4578099615711d7 / 0x0002bf84208204f5977f9a8cf01fdce3d; // multiply by e^2^(+2)
if ((x & 0x400000000000000000000000000000000) != 0)
res = res * 0x0002bf84208204f5977f9a8cf01fdc307 / 0x0000003c6ab775dd0b95b4cbee7e65d11; // multiply by e^2^(+3)
return res;
}
}

View File

@ -0,0 +1,56 @@
pragma solidity ^0.5.2;
library SafeMath {
/**
@dev returns the sum of _x and _y, reverts if the calculation overflows
@param _x value 1
@param _y value 2
@return sum
*/
function add(uint256 _x, uint256 _y) internal pure returns (uint256) {
uint256 z = _x + _y;
require(z >= _x, "SafeMath failed");
return z;
}
/**
@dev returns the difference of _x minus _y, reverts if the calculation underflows
@param _x minuend
@param _y subtrahend
@return difference
*/
function sub(uint256 _x, uint256 _y) internal pure returns (uint256) {
require(_x >= _y, "SafeMath failed");
return _x - _y;
}
/**
@dev returns the product of multiplying _x by _y, reverts if the calculation overflows
@param _x factor 1
@param _y factor 2
@return product
*/
function mul(uint256 _x, uint256 _y) internal pure returns (uint256) {
// gas optimization
if (_x == 0)
return 0;
uint256 z = _x * _y;
require(z / _x == _y, "SafeMath failed");
return z;
}
/**
@dev Integer division of two numbers truncating the quotient, reverts on division by zero.
@param _x dividend
@param _y divisor
@return quotient
*/
function div(uint256 _x, uint256 _y) internal pure returns (uint256) {
require(_y > 0, "SafeMath failed");
uint256 c = _x / _y;
return c;
}
}

18
Contracts/package.json Normal file
View File

@ -0,0 +1,18 @@
{
"name": "dap.ps-contracts",
"version": "1.0.0",
"description": "Embark contracts",
"main": "index.js",
"scripts": {
"build:testnet": "embark build testnet",
"build:development": "embark build development",
"build": "embark build livenet",
"start:dev": "yarn build:testnet",
"start:prod": "yarn build",
"slither": "slither . --exclude naming-convention --filter-paths token"
},
"license": "ISC",
"dependencies": {
}
}

182
Discover_Specification.md Normal file
View File

@ -0,0 +1,182 @@
# Discover SNT Ranking
## Summary
In order to fulfill one of our whitepaper promises, we need a mechanism that uses SNT to curate DApps. While this is not the only mechanism we will make available to users to find interesting and relevant DApps, it is one of the most important, both for SNT utility and because economic mechanisms are at the heart of how we buidl sustainable peer-to-peer networks.
## Abstract
We propose using an exponential [bonded curve](https://beta.observablehq.com/@andytudhope/dapp-store-snt-curation-mechanism), which operates only on downvotes, to implement a simple ranking game. It is the most radical market mechanism feasible: the more SNT a DApp stakes, the higher it ranks. The one caveat is that the more SNT is staked, the cheaper it becomes for the community to move that DApp down the rankings.
## Motivation
Token Curated Registries and other bonded curve implementations try to incentivise the user with some kind of fungible reward token (often with governance rights/requirements attached to it) in order to decentralise the curation of interesting information. However, this creates mental overhead for users (who must manage multiple tokens, each requiring different on-chain transactions) and is unlikely to see high adoption.
Making the ranking algorithm transparent - and giving users an ability to affect it at a small cost to them should they feel very strongly - is potentially a more effective way to achieve decentralised curation.
## User Stories
An effective economic ranking mechanism, selected with the option `Ranked by SNT` (one of many filters), answers the following user stories from our [swarm doc](https://github.com/status-im/swarms/blob/master/ideas/317-dapps-store.md).
1. **I want to be confident a DApp is usable / not a scam.**
1. Having an economic mechanism ensures that the DApps which rank highly quite literally are those providing the "most value" to the community. This is because SNT staked to rank is locked out of circulation, meaning each SNT stakeholder's own holding of SNT should increase in value. Coincidentally, the more SNT staked in total in the store, the stronger the assurance that any given DApp which ranks highly is useful and not a scam.
2. **As an SNT stakeholder, I would like to signal using SNT that I find a listing useful.**
1. Achieved by "upvoting" in the UI. Importantly, upvotes do not affect the bonded curve: users simply donate SNT 1:1 directly to the DApp's `balance`.
3. **As an SNT stakeholder, I would like to signal using SNT that I find a listing to be not useful/poor quality/etc.**
1. Achieved by "downvoting" in the UI, which becomes increasingly cheap the more well-resourced a DApp is. Uses an exponential bonded curve to mint downvotes.
4. **As a DApp developer, I want to be able to propose/vote my DApp for inclusion.**
1. Anybody can submit a DApp for inclusion and "vote" on it by calling `upvote` and adding SNT to its `balance`.
## Specification
#### Constants
1. `uint total` - total SNT in circulation.
2. `uint ceiling` - most influential parameter for [_shape_ of curves](https://beta.observablehq.com/@andytudhope/dapp-store-snt-curation-mechanism).
3. `uint max` - max SNT that any one DApp can stake.
4. `uint decimals` - the amount of decimal precision to use when calculating `max`.
5. `uint safeMax` - protects against overflowing into infinity in `votesMinted`.
#### Data Struct
1. `address developer` - the developer of the DApp, used to send SNT to when `downvote` or `withdraw` is called.
2. `bytes32 id` - a unique identifier for each DApp, potentially with other metadata associated with it, hence the `bytes32`.
3. `bytes metadata` - the name, url, category and IPFS hash of the DApp so that we can resolve it in the store correctly.
4. `uint balance` - keep track of the total staked on each DApp.
5. `uint rate = 1 - (balance/max)` - used to calculate `available` and `votesMinted`.
6. `uint available = balance * rate` - amount of SNT staked a developer can earn back. NB: this is equivalent to the `cost` of all downvotes.
7. `uint votesMinted = available ** (1/rate)` - total downvotes that are "minted".
8. `uint votesCast` - keep track of the downvotes already cast.
9. `uint effectiveBalance = balance - ((votesCast/(1/rate))*(available/votesMinted))` - the Effective Balance each DApp is actually ranked by in the UI (a worked example follows below).
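
To make the interplay of these fields concrete, here is a minimal TypeScript sketch of the same formulas (floating-point only, ignoring the fixed-point units and the Bancor power approximation the contract actually relies on; `max` is taken from the Constructor section below):

```typescript
// Illustrative model of the Data struct formulas above (not contract code).
const max = 1_987_022 // approximate max SNT stakeable on one DApp, see Constructor

function recalc(balance: number, votesCast = 0) {
  const rate = 1 - balance / max
  const available = balance * rate
  const votesMinted = available ** (1 / rate)
  const effectiveBalance =
    balance - (votesCast / (1 / rate)) * (available / votesMinted)
  return { balance, rate, available, votesMinted, votesCast, effectiveBalance }
}

// A freshly created DApp staking 100,000 SNT ranks by its full balance:
console.log(recalc(100_000).effectiveBalance) // 100,000

// Casting some of the minted downvotes lowers only the effective balance,
// which is what the UI ranks by; the stake itself is untouched:
console.log(recalc(100_000, 10_000).effectiveBalance)

// upvoteEffect (see Methods below) is essentially the same recalculation with
// a mock-increased balance, minus the current effectiveBalance:
const upvoteEffect = (balance: number, amount: number) =>
  recalc(balance + amount).effectiveBalance - recalc(balance).effectiveBalance
```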
### Constructor
1. Sets the address for the SNT contract based on the arg passed in.
1. `uint total = 6804870174`
1. `uint ceiling = 292`, as this means the max is close to 2M SNT, and is a local minimum for votesMinted.
1. `uint decimals = 1000000` - We use 1/100th of the total SNT in circulation as our bound, based mostly on Twitter polls...
1. `uint max = (total * ceiling)/decimals`
1. `uint safeMax = 77 * max / 100` - 77% of the absolute max, due to limitations with Bancor's power approximations in Solidity. A quick numeric check follows below.
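
The arithmetic above works out roughly as follows (plain TypeScript, ignoring SNT's 18 decimals):

```typescript
// Reproducing the constructor arithmetic with plain numbers.
const total = 6_804_870_174 // total SNT in circulation
const ceiling = 292
const decimals = 1_000_000

const max = (total * ceiling) / decimals // ≈ 1,987,022 SNT stakeable per DApp
const safeMax = (77 * max) / 100         // ≈ 1,530,007 SNT actually accepted

console.log(Math.floor(max), Math.floor(safeMax))
```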
#### Methods
1. **createDApp** external
1. params: `(bytes32 _id, uint _amount)`
Calls internal method `_createDApp`, passing in `msg.sender`, `_id` and `_amount`.
2. **upvote** external
1. params:`(bytes32 _id, uint _amount)`
Calls internal method `_upvote`, passing in `msg.sender`, `_id` and `_amount`.
3. **downvote** external
1. params: `bytes32 _id, uint _amount`
Calls `downvoteCost` to check the `_amount`, then calls internal method `_downvote`, passing in `msg.sender`, `_id` and `_amount`.
4. **withdraw** external
1. params: `(bytes32 _id, uint _amount)`
Allow developers to reduce their stake/exit the store provided that `_amount <= available`. Recalculate `balance`, `rate`, `available` and `votesMinted`. If `votesCast > votesMinted`, then set them equal so the maths is future-proof, and recalculate `effectiveBalance`.
Emit event containing new `effectiveBalance`.
5. **setMetadata** external
1. params: `(bytes32 _id, bytes calldata _metadata)`
Checks that the person trying to set/update the metadata is the developer, then updates the metadata associated with the DApp at that `id` so that we can resolve it correctly client side.
6. **receiveApproval** external
1. params: `(address _from, uint256 _amount, address _token, bytes _data)`
Included so that users need only sign one transaction when creating a DApp, upvoting or downvoting. Checks that the token (SNT), sender, and data are correct. Decodes the `_data` using `abiDecodeRegister`, checks the amount is correct and figures out which of the three "payable" functions (`createDApp`, `upvote`, and `downvote`) is being called by looking at the signature. A client-side sketch of this single-transaction flow follows after this list.
7. **upvoteEffect** external view
1. params: `(bytes32 _id, uint _amount)`
Mock add `_amount` to `balance`, calculate `mRate`, `mAvailable`, `mVMinted`, and `mEBalance`.
Returns the difference between `mEBalance` and the actual `effectiveBalance`.
8. **downvoteCost** public view
1. params: `(bytes32 _id)`
Specifying that each downvote must move the DApp down by 1% allows us to calculate the `cost` without integrating anything. Calculate the `votesRequired` to move the DApp's effective balance down by that percentage.
Returns `balanceDownBy`, `votesRequired` and `cost`.
9. **\_createDApp** internal
1. params: `(address _from, bytes32 _id, uint _amount)`
Accepts some nominal amount of tokens (> 0) and creates a new Data struct with the `_id` passed to it, setting the new struct's `balance` and using that to calculate `rate`, `available`, `votesMinted` and `effectiveBalance` (which is == `balance` at first).
Emit event containing new `effectiveBalance`.
10. **\_upvote** internal
1. params: `(address _from, bytes32 _id, uint _amount)`
Transfer SNT directly to the contract, which means donating directly to the DApp's `balance`, no money to the developer. Though the votes don't use a curve, we still need to recalculate `rate`, `available`, `votesMinted` and `effectiveBalance`.
Emit event containing new `effectiveBalance`.
11. **\_downvote** internal
1. params: `(address _from, bytes32 _id, uint _amount)`
Send SNT from the user directly to the developer in order to downvote. Call `downvoteCost` to get `balanceDownBy`, `votesRequired` and `cost`.
Add `votesRequired` to `votesCast`, recalculate `effectiveBalance`, and subtract `cost` from `available` so that `withdraw` works correctly.
Emit event containing new `effectiveBalance`.
12. **abiDecodeRegister** private
1. params: `(bytes memory _data)`
Helps decode the data passed to `receiveApproval` using assembly magic.
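
For context on how clients drive these methods with a single signature: the UI encodes the intended Discover call and hands it to the SNT (MiniMe) token's `approveAndCall`, which approves the spend and immediately notifies Discover, landing in `receiveApproval`. Below is a minimal ethers.js (v5) sketch, assuming the spec's `createDApp(bytes32, uint256)` signature and reusing the Ropsten addresses listed in the README (lowercased here):

```typescript
import { ethers } from 'ethers'

// Ropsten addresses from the README, lowercased for this sketch.
const SNT_ADDRESS = '0xc55cf4b03948d7ebc8b9e8bad92643703811d162' // STT (test SNT)
const DISCOVER_ADDRESS = '0x008db8b84547982e8f6677d38e9b9ea64f3ccb8b'

// Encode the Discover call that receiveApproval will decode and dispatch.
const discoverInterface = new ethers.utils.Interface([
  'function createDApp(bytes32 _id, uint256 _amount)',
])

async function createDAppInOneTx(signer: ethers.Signer) {
  const id = ethers.utils.formatBytes32String('my-dapp') // hypothetical id
  const amount = ethers.utils.parseEther('10000')        // 10,000 STT (18 decimals)
  const extraData = discoverInterface.encodeFunctionData('createDApp', [id, amount])

  const snt = new ethers.Contract(
    SNT_ADDRESS,
    ['function approveAndCall(address _spender, uint256 _amount, bytes _extraData) returns (bool)'],
    signer,
  )
  // One signature: approve Discover and notify it in the same transaction,
  // which ends up in receiveApproval -> _createDApp on the contract side.
  const tx = await snt.approveAndCall(DISCOVER_ADDRESS, amount, extraData)
  await tx.wait()
}
```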
## Potential Attacks
1. **Sybil resistance?**
1. If I create a lot of accounts for one DApp, will that increase its ranking?
2. If I vote for one DApp from lots of different accounts, in small amounts, rather than in 1 big amount from a single account, what effect does it have?
Creating many accounts for one DApp is not possible - each DApp is uniquely identified by its `id` and ranked only by the amount of SNT staked on it. In the same way, there is no quadratic effect in this setup, so staking for a DApp from lots of different accounts in small amounts has no greater/lesser effect on its ranking than staking 1 large amount from a single account.
2. **Incentives to stake bad DApps and "force" the community to spend SNT to downvote?**
Remember, you never get back more SNT than you stake, so this is also economically sub-optimal. In addition, there will be a free "complaint" feature as part of the "downvote" screen. There is an important difference between "contractual" and "social" (i.e. the Status UI) reality. Status reserves the right to remove from our UI any DApp that actively violates [our principles](https://status.im/contribute/our_principles.html), though anyone else is free to fork the software and implement different social/UI rules for the same contractual reality. This protects even further against any incentive to submit bad/damaging DApps.
However, at the beginning of the Store, this is an attack vector: ranking highly requires but a small stake, and this could conceivably result in a successful, cheap hype campaign until we change the UI. The price of freedom is eternal vigilance.
3. **Stake a damaging DApp, force some downvotes, and then withdraw my stake?**
You can still never earn back quite as much as you initially staked, enforced by the condition in the `withdraw` function: `require(_amount <= available)`.
4. **What is left in the store when a DApp withdraws the SNT it staked?**
Simply `balance - available`, i.e. some small amount of SNT not available to be withdrawn.
## Rationale
This is a simple economic mechanism that
1. does not place high mental overheads on users and could conceivably be understood by a wider and non-technical audience and
2. does not require a lot of screen real estate (important on mobile). All that is required is a balance for each DApp and up/downvote carets to its right or left, a pattern already well understood on sites like Reddit etc.
Moreover, having SNT is not required to see (and benefit from) a well-curated list of DApps; only if you want to affect the rankings on that list do you require tokens, which also makes the UX considerably easier for non-technical users.
From the perspective of DApp Developers - they must still spend some capital to rank well, just as they currently do with SEO and AdWords etc., but _they stand to earn most of that back_ if the community votes on their product/service, and they can withdraw their stake at any time. The algorithm is entirely transparent and they know where they stand and why at all times.
## Notes
The beauty of Ethereum to me, can be summed up simply:
`By deploying immutable contracts to a shared, public computational surface - contracts whose data can be read deterministically by anyone with access to the internet - we can encode idealism into the way we run society.`
What's more, **what's different this time**, is that the idealism exists independently of the people who encoded it, who inevitably become corrupted, because we are all human.
However, there is hope in cryptoeconomics, which is not about egalitarianism, but about designing systems with no central point of control. Decentralisation is the goal; egalitarianism is a great success metric. But not the other way around, because egalitarianism is not something for which we can reasonably optimise.
## Copyright
Copyright and related rights for this specification waived via [CC0](https://creativecommons.org/publicdomain/zero/1.0/).

110
Makefile Normal file
View File

@ -0,0 +1,110 @@
.PHONY: help clean purge compile-contracts patch-ipfs patch-ws mk-build-dir copy-misc copy-backend compile-js copy-frontend archive
export NODE_ENV ?= localhost
export WALLET_PASSWORD ?= dev_password
export WALLET_MNEMONIC ?= erupt point century seek certain escape solution flee elegant hard please pen
ifeq ($(NODE_ENV),production)
export EMBARK_TARGET ?= livenet
else
ifeq ($(NODE_ENV), localhost)
export EMBARK_TARGET ?= development
else
export EMBARK_TARGET ?= testnet
endif
endif
HELP_FUN = \
%help; \
while(<>) { push @{$$help{$$2 // 'options'}}, [$$1, $$3] if /^([a-zA-Z\-]+)\s*:.*\#\#(?:@([a-zA-Z\-]+))?\s(.*)$$/ }; \
print "Usage: make [target]\n\n"; \
for (sort keys %help) { \
print "${WHITE}$$_:${RESET}\n"; \
for (@{$$help{$$_}}) { \
$$sep = " " x (22 - length $$_->[0]); \
print " ${YELLOW}$$_->[0]${RESET}$$sep${GREEN}$$_->[1]${RESET}\n"; \
}; \
print "\n"; \
}
help: ##@miscellaneous Show this help.
@perl -e '$(HELP_FUN)' $(MAKEFILE_LIST)
all: ##@build Build the final app.zip from scratch
all: node_modules clean compile-contracts mk-build-dir copy-misc copy-backend compile-js copy-frontend archive install-build
ifneq ($(NODE_ENV),localhost)
@echo "SUCCESS! Use the app.zip file."
else
@echo "SUCCESS! Execute 'yarn server-start' and browse http://localhost:4000"
endif
node_modules: ##@install Install the Node.js dependencies using Yarn
yarn install
check-prod-vars: ##@checks Check if the necessary env variables are set
ifneq ($(NODE_ENV),$(filter $(NODE_ENV),production development localhost))
@echo "Unknown NODE_ENV value: ${NODE_ENV}"
@echo "Use 'production' or 'development' or 'localhost'."
exit 1
endif
compile-contracts: ##@compile Compile the contracts using Embark.js
compile-contracts: check-prod-vars
./node_modules/.bin/embark build "${EMBARK_TARGET}"
compile-js: ##@compile Compile the React application
./WebApp/node_modules/.bin/cross-env NODE_ENV=production webpack --config internals/webpack/webpack.prod.babel.js --color -p --progress --hide-modules --display-optimization-bailout
mk-build-dir: ##@create Create the destination directory for full build if the folder doesn't exist
[ -d full-build ] || mkdir -p full-build
copy-backend: ##@copy Copy over the backend files to full-build dir
ifeq ($(NODE_ENV),localhost)
if [ -f ./full-build/yarn.lock ]; then \
cmp -s ./Backend/yarn.lock ./full-build/yarn.lock; \
RETVAL=$$?; \
if [ ! $$RETVAL -eq 0 ]; then \
echo "yarn.lock is different. Removing node_modules and replacing yarn.lock"; \
rm -rf full-build/yarn.lock full-build/node_modules; \
fi \
fi
rsync -r --exclude node_modules ./Backend/* ./full-build/
else
cp -r Backend/* full-build/
endif
copy-frontend: ##@copy Copy over the frontend files to full-build dir
mkdir full-build/frontend
cp -r WebApp/build/* full-build/frontend/
copy-misc: ##@copy Copy over the miscellaneous config files
cp .npmrc full-build/
archive: ##@archive Create the app.zip archive for use with ElasticBeanstalk when running on testnet or mainnet
ifneq ($(NODE_ENV),localhost)
archive: clean-archive
cd full-build && zip -r ../app.zip ./
endif
install-build:
ifeq ($(NODE_ENV),localhost)
cd full-build && yarn
endif
clean-archive: ##@clean Remove app.zip
ifneq ($(NODE_ENV),localhost)
rm -f app.zip
endif
clean-build-dir: ##@clean Remove full-build folder and keep node_modules (depending on environment)
ifeq ($(NODE_ENV),localhost)
find ./full-build -mindepth 1 ! -regex '^./full-build/\(node_modules\|yarn.lock\).*' -delete 2> /dev/null || true
else
rm -fr full-build
endif
clean: clean-build-dir clean-archive ##@clean Cleanup all the build artifacts
purge: ##@clean Remove everything that isn't committed
git clean -dxf -f

178
README.md Normal file
View File

@ -0,0 +1,178 @@
# Discover
Discover new and useful DApps that are mobile-friendly and easy to use. Viewing curated information does not require any special tools, though affecting the way information is ranked will require a web3 wallet, whether that is Status, MetaMask, Trust, Brave or whichever one you prefer.
You can learn more about bonded curves and how Discover works [here](https://our.status.im/discover-a-brave-new-curve/).
## Table of Contents
- [Stack](#stack)
- [Deployed Contracts](#deployed-contracts)
- [Getting Started](#getting-started)
- [Contributing](#contributing)
<!-- - [License](#license) -->
### Stack
* Smart contract framework: [Embark v4.0.1](https://github.com/embarklabs/embark)
* JS Framework: [React](https://github.com/facebook/react) + [Typescript](https://github.com/microsoft/TypeScript)
* SEO & Metadata: [Helmet.js](https://helmetjs.github.io/)
* Blockchain components: [Ethers.js](https://github.com/ethers-io/ethers.js/) + [web3-react](https://github.com/NoahZinsmeister/web3-react)
* Styling: [JSS](https://cssinjs.org/?v=v10.0.3) + [Material UI](https://material-ui.com/)
* State management: [Redux](https://redux.js.org/) + [Redux-Saga](https://redux-saga.js.org/) + [Reselect](https://github.com/reduxjs/reselect)
* Template generation: [Plop](https://plopjs.com/) + [Handlebars.js](https://handlebarsjs.com/)
* Compiling: [Webpack](https://webpack.js.org/) + [Babel](https://babeljs.io/)
* Forms & Validation: [Formik](https://jaredpalmer.com/formik) + [Yup](https://github.com/jquense/yup)
* Notifications: [Toastify](https://fkhadra.github.io/react-toastify/)
### Structure
The boilerplate is set up as a [Yarn](https://yarnpkg.com/) workspace/monorepo, which allows for adding additional workspaces like `Blockchain` or `Server` if required and for executing parallel scripts across all workspaces.
### Webapp
The web app is structured as a standard React app; the important areas to note are `/api`, `/domains`, `/containers` & `/components`.
The API interactions have helpers to automatically format form field data, attach access token headers and allow for reducer-esque API calling functions.
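A hypothetical sketch of what such a helper might look like (names, storage keys and endpoints are illustrative assumptions, not the app's actual code):

```typescript
// Illustrative only: wraps fetch, JSON-encodes form field data and attaches
// the access token header, returning a reusable, typed "api calling function".
const apiCall = <T>(path: string, method = 'POST') =>
  async (payload: Record<string, unknown>): Promise<T> => {
    const token = window.localStorage.getItem('accessToken') // assumed storage key
    const response = await fetch(`/api${path}`, {
      method,
      headers: {
        'Content-Type': 'application/json',
        ...(token ? { Authorization: `Bearer ${token}` } : {}),
      },
      body: JSON.stringify(payload),
    })
    if (!response.ok) throw new Error(`API error ${response.status}`)
    return (await response.json()) as T
  }

// Usage, e.g. submitting DApp metadata for moderation:
const submitDApp = apiCall<{ id: string }>('/dapps')
```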
The domains folder holds the app's main daemon & singleton business-logic components: general app actions are managed here, and reducers for the domain state, selectors etc. can be found here. Domains are meant to be globally accessible services, facilitated through Redux-Saga for async actions.
The containers folder allows for managing the business logic of constructing selectors, action dispatch functions, and any complex operations that should be managed separately from the markup.
The components folder is for markup files & styling.
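As a rough, hypothetical illustration of that domain pattern, here is a self-contained slice with actions, reducer, saga and selector (the real domains in `WebApp` differ in naming and wiring):

```typescript
import { call, put, takeLatest } from 'redux-saga/effects'
import { createSelector } from 'reselect'

// --- actions -----------------------------------------------------------------
const FETCH_DAPPS = 'discover/dapps/FETCH'
const FETCH_DAPPS_SUCCESS = 'discover/dapps/FETCH_SUCCESS'

const fetchDapps = () => ({ type: FETCH_DAPPS } as const)
const fetchDappsSuccess = (dapps: { id: string; name: string }[]) =>
  ({ type: FETCH_DAPPS_SUCCESS, payload: dapps } as const)

type DappsAction = ReturnType<typeof fetchDapps> | ReturnType<typeof fetchDappsSuccess>

// --- reducer: the domain's slice of global state ------------------------------
interface DappsState {
  loading: boolean
  items: { id: string; name: string }[]
}
const initialState: DappsState = { loading: false, items: [] }

export function dappsReducer(state = initialState, action: DappsAction): DappsState {
  switch (action.type) {
    case FETCH_DAPPS:
      return { ...state, loading: true }
    case FETCH_DAPPS_SUCCESS:
      return { loading: false, items: action.payload }
    default:
      return state
  }
}

// --- saga: async side effects handled by the domain "daemon" ------------------
const getDappsFromApi = async (): Promise<{ id: string; name: string }[]> =>
  (await fetch('/api/dapps')).json() // hypothetical endpoint

function* fetchDappsSaga() {
  const dapps: { id: string; name: string }[] = yield call(getDappsFromApi)
  yield put(fetchDappsSuccess(dapps))
}

export function* dappsDomainSaga() {
  yield takeLatest(FETCH_DAPPS, fetchDappsSaga)
}

// --- selectors: how containers read the domain state --------------------------
const selectDappsDomain = (state: { dapps: DappsState }) => state.dapps
export const selectDapps = createSelector(selectDappsDomain, domain => domain.items)
```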
## Install
The goal of our local build process is to abstract away the complexity with smart contracts so that you can focus on adding useful new functionality through React-based bounties that are easy to get started on.
### Prerequisites
1. [Node v10](https://github.com/nvm-sh/nvm) or higher.
2. [Yarn](https://yarnpkg.com/).
3. [mongodb](https://www.mongodb.com/).
First, run yarn to install the workspace dependencies:
```
yarn install
```
On Linux, setting up `mongodb` is as easy as `sudo apt install -y mongodb`, which will also start it automatically. You can stop/restart your local DB any time with `sudo systemctl stop mongodb`, or get its status with `sudo systemctl status mongodb`. I recommend using the simple [robo3t](https://robomongo.org/download) to view and edit your DB easily (you'll need to set DApps to `APPROVED` to see `Edit` and `Withdraw` options and to see them appear in Categories etc.)
### Mongo set up
1. `export DB_CONNECTION=mongodb://localhost:27017/mydb`. Make sure you have `DB_CONNECTION` set as an ENV variable so the app knows where to find your local DB.
2. TODO: `yarn run build:localhost`. This will:
1. Compile all your contracts using Embark, connecting to Ropsten and IPFS through an Infura gateway.
2. Deploy a new instance of Discover onto the Ropsten test network for you to work from. It will only be deployed once, after that the address of your contract is stored in, and fetched from, `shared.development.chains.json`.
3. Build the frontend, create a directory called `full-build`, move each directory from the `Backend` into it, and include the `frontend` as a directory of its own. It will make sure `node_modules` are installed, then you can serve everything in `full-build` by running:
3. `yarn server-start`. Navigate to `http://localhost:4000` to get developing cool new things for the future of curated information.
**Note:**
1. Change this line in [Backend/config/index.js](https://github.com/dap-ps/discover/blob/master/Backend/config/index.js#L24) to your local Ropsten version of the contract, stored in `shared.development.chains.json`.
2. You'll need to visit [simpledapp.eth using Status](https://status.im/get/) -> Assets Tab -> Request `STT`. This is the Status Test Token on Ropsten that needs to be used with your instance of Discover in order to submit/upvote/downvote in your local app. Using a proper test network even for local development allows us to better understand what the user experience is actually like in production more easily.
## Usage
### Development
For running a local instance use the command:
```
yarn start:dev
```
### Template generator
To make use of the webapp template generator, first open a terminal and navigate to `./WebApp`, run the command `yarn generate` & follow the prompts.
### Build
To build the project across workspaces, at the root of the directory, run the command `yarn build`.
### Production build
1. Run `yarn` to install the relevant packages
2. Set the environment files with the required values in `Backend`, `Contracts` & `WebApp`
3. Run `yarn create:fullbuild`
4. `app.zip` will be found in the root of the repo
## Contributing
Frontend boilerplate designed & crafted originally by [@panterazar](https://github.com/panterazar)
General updates & modifications by [@RyRy79261](https://github.com/RyRy79261)
## Deployed Contracts
Ropsten (the first is `STT`, the Status Test Token):
```
MiniMeToken: { address: '0xc55cf4b03948d7ebc8b9e8bad92643703811d162' },
Discover: { address: '0x008db8b84547982e8F6677D38e9b9ea64F3ccB8B' },
```
Mainnet:
```
MiniMeToken: { address: '0x744d70fdbe2ba4cf95131626614a1763df805b9e' },
Discover: { address: '0x5bCF2767F86f14eDd82053bfBfd5069F68C2C5F8' },
```
## Getting Started
The goal of our local build process is to abstract away the complexity with smart contracts so that you can focus on adding useful new functionality through React-based bounties that are easy to get started on.
#### 3 Prerequisites
1. [Node v10](https://github.com/nvm-sh/nvm) or higher.
2. [Yarn](https://yarnpkg.com/).
3. [mongodb](https://www.mongodb.com/).
On Linux, setting up `mongodb` is as easy as `sudo apt install -y mongodb`, which will also start it automatically. You can stop/restart your local DB any time with `sudo systemctl stop mongodb`, or get its status with `sudo systemctl status mongodb`. I recommend using the simple [robo3t](https://robomongo.org/download) to view and edit your DB easily (you'll need to set DApps to `APPROVED` to see `Edit` and `Withdraw` options and to see them appear in Categories etc.)
#### 4 Quick Steps
1. `export DB_CONNECTION=mongodb://localhost:27017/mydb`. Make sure you have `DB_CONNECTION` set as an ENV variable so the app knows where to find your local DB.
2. `yarn start:dev`. This will:
1. Compile all your contracts using Embark, connecting to Ropsten and IPFS through an Infura gateway.
2. Deploy a new instance of Discover onto the Ropsten test network for you to work from. It will only be deployed once, after that the address of your contract is stored in, and fetched from, `shared.development.chains.json`.
**Note:**
1. Change this line in [Backend/config/index.js](https://github.com/dap-ps/discover/blob/master/Backend/config/index.js#L24) to your local Ropsten version of the contract, stored in `shared.development.chains.json`.
2. You'll need to visit [simpledapp.eth using Status](https://status.im/get/) -> Assets Tab -> Request `STT`. This is the Status Test Token on Ropsten that needs to be used with your instance of Discover in order to submit/upvote/downvote in your local app. Using a proper test network even for local development allows us to better understand what the user experience is actually like in production more easily.
#### Work to be done
1. Integrate Kyber functionality so that people can use (at least) SNT, ETH and DAI to participate in the store (it just gets exchanged in the background into SNT before being submitted to the contract).
2. Create a `downvote pool` for each DApp so that anyone can downvote by any amount, not just 1%. When the pool hits 1%, the downvote is sent to the contract. This will be important if people ever stake large amounts, 1% of which may be too expensive for individual users. It will potentially amplify "the community's" ability to respond to bad actors.
3. Integrate [embeddable whisper chats](https://github.com/status-im/status-chat-widget) into the site, so that it is easy to plug into the community chat directly "behind" each DApp (it's just the name of the DApp as a whisper topic, i.e. #cryptokitties).
4. Research a way to fetch information about popular DApps on Ethereum through non-economic metrics. Perhaps this means just plugging into an API from OpenSea/StateOfTheDApps for now and leveraging their work. Perhaps it means figuring out how to [gossip information about use of DApps via whisper](https://discuss.status.im/t/friend-to-friend-content-discovery-community-feeds/1212)?
#### Running unit tests
Use `./node_modules/.bin/embark test`
To test a specific smart contract you can use `./node_modules/.bin/embark test test/Discover_spec.js`.
#### Running slither
`slither . --exclude naming-convention --filter-paths token`
Make sure you get Trail of Bits' [latest static analysis tool](https://securityonline.info/slither/), and do your own static analysis on the relevant contracts that you are working on.
## Available Scripts
This project is based on Embark v4.0.1, with a few things customised for React.
```
yarn run build:dev
```
or
```
yarn run build
```
Builds the app into the `full-build` directory and creates the `app.zip` ready for use with ElasticBeanstalk.

14
WebApp/.editorconfig Executable file
View File

@ -0,0 +1,14 @@
# editorconfig.org
root = true
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
[*.md]
trim_trailing_whitespace = false

9
WebApp/.env.example Executable file
View File

@ -0,0 +1,9 @@
INFURA_KEY=8675214b97b44e96b70d05326c61fd6a
MAINNET_SNT=0x744d70fdbe2ba4cf95131626614a1763df805b9e
ROPSTEN_SNT=0xc55cf4b03948d7ebc8b9e8bad92643703811d162
MAINNET_DISCOVER=0x5bCF2767F86f14eDd82053bfBfd5069F68C2C5F8
ROPSTEN_DISCOVER=0x7B064fc5FDc4BACf2147A262881374d22d0fF23B
API_HOST=localhost:4000
API_SCHEMA=http
TARGET_NETWORK=3

12
WebApp/.gitignore vendored Executable file
View File

@ -0,0 +1,12 @@
# Don't check auto-generated stuff into git
coverage
build
node_modules
stats.json
# Cruft
.DS_Store
npm-debug.log
.idea
.awcache
discover.zip

1
WebApp/.nvmrc Executable file
View File

@ -0,0 +1 @@
lts/carbon

8
WebApp/.prettierignore Executable file
View File

@ -0,0 +1,8 @@
build/
node_modules/
internals/generators/
internals/scripts/
package-lock.json
yarn.lock
package.json
build/

8
WebApp/.prettierrc Executable file
View File

@ -0,0 +1,8 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": true,
"singleQuote": true,
"trailingComma": "all"
}

5
WebApp/.stylelintrc Executable file
View File

@ -0,0 +1,5 @@
{
"extends": [
"stylelint-config-recommended",
]
}

15
WebApp/README.md Executable file
View File

@ -0,0 +1,15 @@
# React + Redux Typescript WebApp boilerplate
This project is built off of [React Boilerplate](https://github.com/react-boilerplate/react-boilerplate/).
## Documentation
- [**The Hitchhikers Guide to `react-boilerplate`**](https://github.com/react-boilerplate/react-boilerplate/blob/master/docs/general/introduction.md): An introduction for newcomers to this boilerplate.
- [Overview](https://github.com/react-boilerplate/react-boilerplate/tree/master/docs/general): A short overview of the included tools
- [**Commands**](https://github.com/react-boilerplate/react-boilerplate/blob/master/docs/general/commands.md): Getting the most out of this boilerplate
- [Testing](https://github.com/react-boilerplate/react-boilerplate/tree/master/docs/testing): How to work with the built-in test harness
- [Styling](https://github.com/react-boilerplate/react-boilerplate/tree/master/docs/css): How to work with the CSS tooling
- [Your app](https://github.com/react-boilerplate/react-boilerplate/tree/master/docs/js): Supercharging your app with Routing, Redux, simple
asynchronicity helpers, etc.
- [Typescript](https://github.com/Can-Sahin/react-boilerplate-typescript/blob/master/docs/general/typescript.md) React Boilerplate + Typescript
- [**Troubleshooting**](https://github.com/react-boilerplate/react-boilerplate/blob/master/docs/general/gotchas.md): Solutions to common problems faced by developers.

32
WebApp/babel.config.js Executable file
View File

@ -0,0 +1,32 @@
module.exports = {
presets: [
[
'@babel/preset-env',
{
modules: false,
},
],
'@babel/preset-react',
],
plugins: [
'@babel/plugin-proposal-class-properties',
'@babel/plugin-syntax-dynamic-import',
],
env: {
production: {
only: ['app'],
plugins: [
'lodash',
'transform-react-remove-prop-types',
'@babel/plugin-transform-react-inline-elements',
'@babel/plugin-transform-react-constant-elements',
],
},
test: {
plugins: [
'@babel/plugin-transform-modules-commonjs',
'dynamic-import-node',
],
},
},
};

Binary file not shown.

Some files were not shown because too many files have changed in this diff