Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion benchmarks/000.microbenchmarks/010.sleep/config.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
{
"timeout": 120,
"memory": 128,
"languages": ["python", "nodejs"]
"languages": ["python", "nodejs"],
"modules": []
}
2 changes: 1 addition & 1 deletion benchmarks/000.microbenchmarks/010.sleep/input.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,5 @@
def buckets_count():
return (0, 0)

def generate_input(data_dir, size, input_buckets, output_buckets, upload_func):
def generate_input(data_dir, size, benchmarks_bucket, input_paths, output_paths, upload_func, nosql_func):
return { 'sleep': size_generators[size] }
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
{
"timeout": 30,
"memory": 128,
"languages": ["python"]
"languages": ["python"],
"modules": []
}
14 changes: 9 additions & 5 deletions benchmarks/000.microbenchmarks/020.network-benchmark/input.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,11 @@


def buckets_count():
return (0, 1)
return 0, 1


def generate_input(data_dir, size, input_buckets, output_buckets, upload_func):
return {'output-bucket': output_buckets[0]}
def generate_input(data_dir, size, benchmarks_bucket, input_paths, output_paths, upload_func, nosql_func):
return {
'bucket': {
'bucket': benchmarks_bucket,
'output': output_paths[0],
},
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import csv
import json
import os.path
import socket
from datetime import datetime
from time import sleep
Expand All @@ -12,7 +13,8 @@ def handler(event):
address = event['server-address']
port = event['server-port']
repetitions = event['repetitions']
output_bucket = event.get('output-bucket')
output_bucket = event.get('bucket').get('bucket')
output_prefix = event.get('bucket').get('output')
times = []
i = 0
socket.setdefaulttimeout(3)
Expand Down Expand Up @@ -50,6 +52,7 @@ def handler(event):
writer.writerow(row)

client = storage.storage.get_instance()
key = client.upload(output_bucket, 'results-{}.csv'.format(request_id), '/tmp/data.csv')
filename = 'results-{}.csv'.format(request_id)
key = client.upload(output_bucket, os.path.join(output_prefix, filename), '/tmp/data.csv')

return { 'result': key }
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
{
"timeout": 30,
"memory": 128,
"languages": ["python"]
"languages": ["python"],
"modules": []
}
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@


def buckets_count():
return (0, 1)
return 0, 1

def generate_input(data_dir, size, input_buckets, output_buckets, upload_func):
return {'output-bucket': output_buckets[0]}
def generate_input(data_dir, size, benchmarks_bucket, input_paths, output_paths, upload_func, nosql_func):
return {
'bucket': {
'bucket': benchmarks_bucket,
'output': output_paths[0],
},
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import csv
import json
import os
import socket
from datetime import datetime
from time import sleep
Expand All @@ -12,7 +13,8 @@ def handler(event):
address = event['server-address']
port = event['server-port']
repetitions = event['repetitions']
output_bucket = event.get('output-bucket')
output_bucket = event.get('bucket').get('bucket')
output_prefix = event.get('bucket').get('output')
times = []
print("Starting communication with {}:{}".format(address, port))
i = 0
Expand Down Expand Up @@ -64,7 +66,8 @@ def handler(event):
writer.writerow(row)

client = storage.storage.get_instance()
key = client.upload(output_bucket, 'results-{}.csv'.format(request_id), '/tmp/data.csv')
filename = 'results-{}.csv'.format(request_id)
key = client.upload(output_bucket, os.path.join(output_prefix, filename), '/tmp/data.csv')
else:
key = None

Expand Down
3 changes: 2 additions & 1 deletion benchmarks/000.microbenchmarks/040.server-reply/config.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
{
"timeout": 120,
"memory": 128,
"languages": ["python", "nodejs"]
"languages": ["python", "nodejs"],
"modules": []
}
2 changes: 1 addition & 1 deletion benchmarks/000.microbenchmarks/040.server-reply/input.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,5 @@
def buckets_count():
return (0, 0)

def generate_input(data_dir, size, input_buckets, output_buckets, upload_func):
def generate_input(data_dir, size, benchmarks_bucket, input_paths, output_paths, upload_func, nosql_func):
return { 'sleep': size_generators[size] }
77 changes: 53 additions & 24 deletions docs/usage.md
Original file line number Diff line number Diff line change
Expand Up @@ -71,46 +71,71 @@ In addition to the cloud deployment, we provide an opportunity to launch benchma
This allows us to conduct debugging and a local characterization of the benchmarks.

First, launch a storage instance. The command below is going to deploy a Docker container,
map the container's port to port `9011` on host network, and write storage instance configuration
to file `out_storage.json`
map the container's port to the port defined in the configuration on the host network, and write the
storage instance configuration to the file `out_storage.json`

```
./sebs.py storage start minio --port 9011 --output-json out_storage.json
```bash
./sebs.py storage start all config/storage.json --output-json out_storage.json
```

Then, we need to update the configuration of `local` deployment with information on the storage
instance. The `.deployment.local` object in the configuration JSON must contain a new object
`storage`, with the data provided in the `out_storage.json` file. Fortunately, we can achieve
this automatically with a single command by using `jq`:

```
```bash
jq '.deployment.local.storage = input' config/example.json out_storage.json > config/local_deployment.json
```

The output file will contain a JSON object that should look similar to this one:

```json
"deployment": {
"name": "local",
"local": {
"storage": {
"address": "172.17.0.2:9000",
"mapped_port": 9011,
"access_key": "XXXXX",
"secret_key": "XXXXX",
"instance_id": "XXXXX",
"input_buckets": [],
"output_buckets": [],
"type": "minio"
}
{
"deployment": {
"name": "local",
"local": {
"storage": {
"object": {
"type": "minio",
"minio": {
"address": "172.17.0.3:9000",
"mapped_port": 9011,
"access_key": "xxx",
"secret_key": "xxx",
"instance_id": "xxx",
"output_buckets": [],
"input_buckets": [],
"version": "xxx",
"data_volume": "minio-volume",
"type": "minio"
}
},
"nosql": {
"type": "scylladb",
"scylladb": {
"address": "172.17.0.4:8000",
"mapped_port": 9012,
"alternator_port": 8000,
"access_key": "xxx",
"secret_key": "xxx",
"instance_id": "xxx",
"region": "xxx",
"cpus": 1,
"memory": "xxx",
"version": "xxx",
"data_volume": "scylladb-volume"
}
}
}
}
}
}
```

To launch Docker containers, use the following command - this example launches benchmark `110.dynamic-html` with size `test`:

```
./sebs.py local start 110.dynamic-html test out_benchmark.json --config config/local_deployment.json --deployments 1
```bash
./sebs.py local start 110.dynamic-html test out_benchmark.json --config config/local_deployment.json --deployments 1 --remove-containers --architecture=x64
```

The output file `out_benchmark.json` will contain the information on containers deployed and the endpoints that can be used to invoke functions:
Expand Down Expand Up @@ -142,18 +167,22 @@ The output file `out_benchmark.json` will contain the information on containers

In our example, we can use `curl` to invoke the function with provided input:

```
curl 172.17.0.3:9000 --request POST --data '{"random_len": 10,"username": "testname"}' --header 'Content-Type: application/json'
```bash
curl "$(jq -rc ".functions[0].url" out_benchmark.json)" \
  --request POST \
  --data "$(jq -rc ".inputs[0]" out_benchmark.json)" \
  --header 'Content-Type: application/json'
```

To stop containers, you can use the following command:

```
./sebs.py local stop out_benchmark.json
./sebs.py storage stop out_storage.json
./sebs.py storage stop all out_storage.json
```

The stopped containers won't be automatically removed unless the option `--remove-containers` has been passed to the `start` command.
Note: The stopped benchmark containers won't be automatically removed
unless the option `--remove-containers` has been passed to the `local start` command.

#### Memory Measurements

Expand Down
Loading