Committed by GitHub
60 changed files with 7411 additions and 325 deletions
@@ -0,0 +1,15 @@
dist: xenial
sudo: required
env:
  global:
    - DOCKER_REPO=perara/wg-manager
before_install:
  - curl -fsSL https://get.docker.com | sh
  - echo '{"experimental":"enabled"}' | sudo tee /etc/docker/daemon.json
  - mkdir -p $HOME/.docker
  - echo '{"experimental":"enabled"}' | sudo tee $HOME/.docker/config.json
  - sudo service docker start
install:
  - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
  - docker buildx create --name xbuilder --use
script: bash ci.sh
@@ -0,0 +1,16 @@
import requests


if __name__ == "__main__":
    sess = requests.Session()

    resp = sess.post("http://localhost:8888/api/v1/login", data={
        "username": "admin",
        "password": "admin"
    })
    print(resp.json())
    sess.headers.update({
        "Authorization": f"Bearer {resp.json()['access_token']}"
    })

    for _ in range(20):
        print(sess.get("http://localhost:8888/api/v1/wg/generate_psk").json())
@@ -0,0 +1,62 @@
version: "2.1"
services:

  server:
    container_name: wg-manager
    build: .
    restart: always
    sysctls:
      net.ipv6.conf.all.disable_ipv6: 0
    cap_add:
      - NET_ADMIN
    #network_mode: host # Alternatively
    ports:
      - 11820:11820/udp
      - 51800-51900:51800-51900/udp
      - 8888:8888
    environment:
      HOST: 0.0.0.0
      PORT: 8888
      ADMIN_USERNAME: admin
      ADMIN_PASSWORD: admin
      WEB_CONCURRENCY: 2
      SERVER_INIT_INTERFACE_START: 1

      # endpoint dynamic variables: ||external|| , ||internal||
      SERVER_INIT_INTERFACE: '{"address":"10.0.200.1","v6_address":"fd42:42:42::1","subnet":24,"v6_subnet":64,"interface":"wg0","listen_port":"51820","endpoint":"server","dns":"10.0.200.1,8.8.8.8","private_key":"","public_key":"","post_up":"","post_down":"","configuration":"","is_running":false,"peers":[]}'
      SERVER_STARTUP_API_KEY: thisisasecretkeythatnobodyknows
    networks:
      - wg-manager-net

  client:
    container_name: wg-manager-server-with-client
    build: .
    restart: always
    sysctls:
      net.ipv6.conf.all.disable_ipv6: 0
    cap_add:
      - NET_ADMIN
    ports:
      - 8889:8889
    privileged: true
    environment:
      HOST: 0.0.0.0 # Optional (For Accessing WEB-Gui)
      PORT: 8889 # Optional (Web-GUI Listen Port)
      WEB_CONCURRENCY: 1 # Optional
      ADMIN_USERNAME: admin
      ADMIN_PASSWORD: admin
      INIT_SLEEP: 5 # If you run into concurrency issues
      SERVER: 0 # If you want to host a server as well
      CLIENT: 1 # If you want to connect to servers
      CLIENT_START_AUTOMATICALLY: 1 # If you want the client to start automatically
      CLIENT_1_NAME: "client-1" # Name of first client
      CLIENT_1_ROUTES: "10.0.200.0/24"
      CLIENT_1_SERVER_HOST: "http://server:8888" # Endpoint of first server
      CLIENT_1_SERVER_INTERFACE: "wg0" # Interface of first server (to get config)
      CLIENT_1_API_KEY: "thisisasecretkeythatnobodyknows" # API-Key of first server (to get config)
    networks:
      - wg-manager-net

networks:
  wg-manager-net:
    driver: bridge
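Editor's note: as a quick smoke test of the compose stack above, the login flow from the test script earlier in this diff can be reused against the server container's published port. A minimal sketch, assuming the stack was started with `docker-compose up -d` and that the default `admin`/`admin` credentials from the environment block are unchanged:

```python
import requests

# Assumes the compose stack above is running and port 8888 is published on localhost.
sess = requests.Session()
resp = sess.post("http://localhost:8888/api/v1/login", data={
    "username": "admin",   # ADMIN_USERNAME from the compose file
    "password": "admin",   # ADMIN_PASSWORD from the compose file
})
resp.raise_for_status()
sess.headers.update({"Authorization": f"Bearer {resp.json()['access_token']}"})

# Same endpoint exercised by the test script earlier in this diff.
print(sess.get("http://localhost:8888/api/v1/wg/generate_psk").json())
```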
@@ -1,5 +1,6 @@
 #!/usr/bin/env bash
+cd ..
 docker login
 
-docker build -t perara/wg-manager .
-docker push perara/wg-manager
+docker build -t perara/wg-manager:dev .
+docker push perara/wg-manager:dev
File diff suppressed because it is too large
@@ -0,0 +1,27 @@
from sqlalchemy.orm import Session

import models


def add_initial_api_key_for_admin(sess: Session, api_key, ADMIN_USERNAME):

    db_user = sess.query(models.User)\
        .filter_by(username=ADMIN_USERNAME)\
        .one()

    exists_api_key = sess.query(models.UserAPIKey)\
        .filter_by(
            user_id=db_user.id,
            key=api_key
        )\
        .count()

    if exists_api_key == 0:
        db_api_key = models.UserAPIKey()
        db_api_key.key = api_key
        db_api_key.user_id = db_user.id

        sess.add(db_api_key)
        sess.commit()

    return True
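Editor's note: a minimal sketch of how this helper might be invoked at startup. The session factory name `database.SessionLocal` is a hypothetical placeholder (the real wiring lives elsewhere in this PR); the environment variable names mirror the docker-compose file above:

```python
import os

from database import SessionLocal  # hypothetical session factory, for illustration only

sess = SessionLocal()
try:
    # SERVER_STARTUP_API_KEY / ADMIN_USERNAME mirror the docker-compose environment above.
    add_initial_api_key_for_admin(
        sess,
        api_key=os.environ["SERVER_STARTUP_API_KEY"],
        ADMIN_USERNAME=os.environ.get("ADMIN_USERNAME", "admin"),
    )
finally:
    sess.close()
```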
@@ -0,0 +1,21 @@
from sqlalchemy import *
from migrate import *


def upgrade(migrate_engine):
    try:
        meta = MetaData(bind=migrate_engine)
        server = Table('server', meta, autoload=True)
        subnet = Column('subnet', Integer, nullable=False)
        subnet.create(server)
    except:
        pass


def downgrade(migrate_engine):
    try:
        meta = MetaData(bind=migrate_engine)
        server = Table('server', meta, autoload=True)
        server.c.subnet.drop()
    except:
        pass
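Editor's note: these sqlalchemy-migrate scripts (this one and the ones that follow) are normally applied by the migration framework, but a single script can also be exercised by hand against an engine. A minimal sketch, assuming a local SQLite database; the URL is illustrative only:

```python
from sqlalchemy import create_engine

# Illustrative database URL; the real path/URL is configured elsewhere in this PR.
engine = create_engine("sqlite:///database.db")

upgrade(engine)      # adds the 'subnet' column to the 'server' table
# downgrade(engine)  # would drop it again
```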
@@ -0,0 +1,32 @@
from sqlalchemy import *
from migrate import *


def upgrade(migrate_engine):
    try:
        meta = MetaData(bind=migrate_engine)
        server = Table('server', meta, autoload=True)
        v6_address_server = Column('v6_address', VARCHAR, unique=True, nullable=True)
        v6_address_server.create(server)

        meta = MetaData(bind=migrate_engine)
        peer = Table('peer', meta, autoload=True)
        v6_address_peer = Column('v6_address', VARCHAR, nullable=True)
        v6_address_peer.create(peer)
    except:
        pass


def downgrade(migrate_engine):
    try:
        meta = MetaData(bind=migrate_engine)
        server = Table('server', meta, autoload=True)
        server.c.v6_address.drop()

        meta = MetaData(bind=migrate_engine)
        peer = Table('peer', meta, autoload=True)
        peer.c.v6_address.drop()
    except:
        pass
@@ -0,0 +1,21 @@
from sqlalchemy import *
from migrate import *


def upgrade(migrate_engine):
    try:
        meta = MetaData(bind=migrate_engine)
        peer = Table('server', meta, autoload=True)
        v6_subnet = Column('v6_subnet', INTEGER)
        v6_subnet.create(peer)
    except:
        pass


def downgrade(migrate_engine):
    try:
        meta = MetaData(bind=migrate_engine)
        peer = Table('server', meta, autoload=True)
        peer.c.v6_subnet.drop()
    except:
        pass
@@ -0,0 +1,34 @@
from sqlalchemy import *
from migrate import *


def upgrade(migrate_engine):
    try:
        meta = MetaData(bind=migrate_engine)
        server = Table('server', meta, autoload=True)
        read_only = Column('read_only', INTEGER, default=0)
        read_only.create(server)
    except:
        pass

    try:
        meta = MetaData(bind=migrate_engine)
        peer = Table('peer', meta, autoload=True)
        read_only = Column('read_only', INTEGER, default=0)
        read_only.create(peer)
    except:
        pass


def downgrade(migrate_engine):
    try:
        meta = MetaData(bind=migrate_engine)
        server = Table('server', meta, autoload=True)
        server.c.read_only.drop()
    except:
        pass
    try:
        meta = MetaData(bind=migrate_engine)
        server = Table('peer', meta, autoload=True)
        server.c.read_only.drop()
    except:
        pass
@@ -0,0 +1,56 @@
import abc
from pathlib import Path
import subprocess
import shlex


class BaseObfuscation(abc.ABC):

    def __init__(self, binary_name=None, binary_path=None, algorithm=None):

        assert binary_name is not None or binary_path is not None
        # Assign binary_path first so it is defined when binary_name has to be derived from it.
        self.binary_path = binary_path if binary_path else ""
        self.binary_name = binary_name if binary_name is not None else Path(self.binary_path).name
        self.algorithm = algorithm

    def ensure_installed(self):

        # Attempt to find process by path
        binary = Path(self.binary_path)
        if not binary.is_file():
            # Did not find by path, attempt to find using which
            proc_which = subprocess.Popen(["which", self.binary_name], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            data = [x.decode().strip() for x in proc_which.communicate() if x != b''][0]

            if proc_which.returncode != 0:
                raise RuntimeError("Could not find binary '%s'" % data)

            self.binary_path = data

    def execute(self, *args, kill_first=False, override_command=None):

        if kill_first:
            # TODO try to delete by full name as we dont want to kill other processes.
            pattern = self.binary_name
            self.execute(*[pattern], override_command="pkill")
            #pattern = self.binary_path + " " + ' '.join(args)
            #print(pattern)
            #kill_output, kill_code = self.execute(*[pattern], override_command="pkill")

        command = override_command if override_command is not None else self.binary_path
        print(shlex.join([command] + list(args)))
        proc_which = subprocess.Popen([command] + list(args), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        raw_data = proc_which.communicate()

        data = [x.decode().strip() for x in raw_data if x != b'']
        if len(data) == 0:
            data = ""
        else:
            data = data[0]
        return data, proc_which.returncode
@@ -0,0 +1,30 @@
from script.obfuscate import BaseObfuscation
import re


class ObfuscateOBFS4(BaseObfuscation):

    def __init__(self):
        super().__init__(
            binary_name="obfs4proxy",
            binary_path="/usr/bin/obfs4proxy",
            algorithm="obfs4"
        )

        self.ensure_installed()

    def ensure_installed(self):
        super().ensure_installed()

        output, code = self.execute("-version")

        if re.match(f'{self.binary_name}-[0-9]+.[0-9]+.[0-9]+', output) and code == 0:
            return True
        else:
            raise RuntimeError(f"Could not verify that {self.binary_name} is installed correctly.")


if __name__ == "__main__":

    x = ObfuscateOBFS4()
    x.ensure_installed()
@@ -0,0 +1,119 @@
from pathlib import Path

import requests

import const
from script.obfuscate import BaseObfuscation
import re
import os
import qrcode
import socket

from script.obfuscate.obfs4 import ObfuscateOBFS4


class ObfuscationViaTOR(BaseObfuscation):

    def __init__(self, algorithm: BaseObfuscation):
        super().__init__(
            binary_name="tor"
        )
        self.algorithm = algorithm
        self.tor_data_dir = "/tmp/wg-manager-tor-proxy"
        self.tor_config_file = "/tmp/wg-manager-tor-proxy/torrc"
        self.tor_fingerprint_file = f"{self.tor_data_dir}/fingerprint"
        self.tor_bridge_file = f"{self.tor_data_dir}/pt_state/obfs4_bridgeline.txt"

        # Create the data directory before touching the torrc file inside it.
        os.makedirs(self.tor_data_dir, exist_ok=True)
        Path(self.tor_config_file).touch()

    def __del__(self):
        pass

    def ensure_installed(self):
        super().ensure_installed()
        output, code = self.execute("--version")

        if re.match(f'Tor version .*', output) and code == 0:
            return True
        else:
            raise RuntimeError(f"Could not verify that {self.binary_name} is installed correctly.")

    def start(self):

        output, code = self.execute(
            "-f", self.tor_config_file,
            "--DataDirectory", self.tor_data_dir,
            "--RunAsDaemon", "1",
            "--ExitPolicy", "reject *:*",
            "--ORPort", str(const.OBFUSCATE_SOCKS_TOR_PORT),
            "--BridgeRelay", "1",
            "--PublishServerDescriptor", "0",
            "--ServerTransportPlugin", f"{self.algorithm.algorithm} exec {self.algorithm.binary_path}",
            "--ServerTransportListenAddr", f"{self.algorithm.algorithm} 0.0.0.0:{const.OBFUSCATE_TOR_LISTEN_ADDR}",
            "--ExtORPort", "auto",
            "--ContactInfo", "wg-manager@github.com",
            "--Nickname", "wgmanager",
            kill_first=True
        )

        print(output)

    def generate_bridge_line(self, local=False):

        if local:
            ip_address = socket.gethostbyname(socket.gethostname())
        else:
            ip_address = requests.get("https://api.ipify.org").text

        with open(self.tor_fingerprint_file, "r") as f:
            fingerprint = f.read().split(" ")
            assert len(fingerprint) == 2, "Could not load fingerprint correctly. " \
                                          "Should be a list of 2 items (name, fingerprint)"
            fingerprint = fingerprint[1]

        with open(self.tor_bridge_file, "r") as f:
            bridge_line_raw = f.read()

        bridge_line = re.search(r"^Bridge .*", bridge_line_raw, re.MULTILINE).group(0)
        bridge_line = bridge_line\
            .replace("<IP ADDRESS>", ip_address)\
            .replace("<PORT>", str(const.OBFUSCATE_TOR_LISTEN_ADDR))\
            .replace("<FINGERPRINT>", fingerprint)\
            .replace("Bridge ", "bridge://")\
            .replace("\n", "")
        #bridge_line = f"bridge://{self.algorithm.algorithm} {ip_address}:{const.OBFUSCATE_SOCKS_TOR_PORT} {fingerprint}"
        print(bridge_line)
        return bridge_line

    def output_qr(self, text, image=False):

        qr = qrcode.QRCode(
            version=10,
            error_correction=qrcode.constants.ERROR_CORRECT_L,
            box_size=10,
            border=4,
        )
        qr.add_data(text)
        qr.make(fit=True)

        if image:
            img = qr.make_image(fill_color="black", back_color="white")
            img.show()
        else:
            try:
                qr.print_tty()
            except:
                qr.print_ascii()


if __name__ == "__main__":

    x = ObfuscationViaTOR(
        algorithm=ObfuscateOBFS4()
    )
    x.ensure_installed()
    x.start()
    bridge_line = x.generate_bridge_line(local=False)
    x.output_qr(bridge_line, image=True)
    #x.generate_bridge_line(local=False)
File diff suppressed because it is too large
@@ -0,0 +1,43 @@
<mat-card>
  <mat-card-title>
    API Keys
  </mat-card-title>

  <mat-card-content>
    You can use API keys to perform authenticated actions. They are less secure than OAuth2, but offer increased convenience.
    <br><b>Note:</b> A newly created API key is shown only <b>once</b>, so copy it and store it somewhere safe.

    <table mat-table [dataSource]="dataSource" style="width: 100%">

      <!-- Id Column -->
      <ng-container matColumnDef="id">
        <th mat-header-cell *matHeaderCellDef> ID </th>
        <td mat-cell *matCellDef="let element"> {{element.id}} </td>
      </ng-container>

      <!-- Key Column -->
      <ng-container matColumnDef="key">
        <th mat-header-cell *matHeaderCellDef> API-Key </th>
        <td mat-cell *matCellDef="let element"> {{(element.key) ? element.key : "[HIDDEN]"}} </td>
      </ng-container>

      <!-- Created_At Column -->
      <ng-container matColumnDef="created_at">
        <th mat-header-cell *matHeaderCellDef> Creation Date </th>
        <td mat-cell *matCellDef="let element"> {{element.created_date | date:'medium'}} </td>
      </ng-container>

      <!-- Delete Column -->
      <ng-container matColumnDef="delete">
        <th mat-header-cell *matHeaderCellDef> Delete </th>
        <td mat-cell *matCellDef="let element"> <button mat-flat-button color="warn" (click)="deleteAPIKey(element)">Delete</button></td>
      </ng-container>

      <tr mat-header-row *matHeaderRowDef="displayedColumns"></tr>
      <tr mat-row *matRowDef="let row; columns: displayedColumns;"></tr>

    </table>

    <button mat-flat-button color="primary" (click)="createAPIKey()">New Key</button>
  </mat-card-content>
</mat-card>
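Editor's note: the card text above describes API keys as an alternative to the OAuth2 bearer-token flow. A rough sketch of what key-based access from Python could look like is shown below. The header name `X-API-Key` is an assumption for illustration only (the header actually accepted by the backend is defined elsewhere in this PR); the key value and the `/api/v1/wg/generate_psk` endpoint come from the compose file and test script earlier in this diff.

```python
import requests

API_KEY = "thisisasecretkeythatnobodyknows"  # SERVER_STARTUP_API_KEY from the compose file

sess = requests.Session()
# Assumed header name, for illustration only; the backend's real header is defined elsewhere in this PR.
sess.headers.update({"X-API-Key": API_KEY})

print(sess.get("http://localhost:8888/api/v1/wg/generate_psk").json())
```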
@@ -0,0 +1,25 @@
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { ApiKeyComponent } from './api-key.component';

describe('ApiKeyComponent', () => {
  let component: ApiKeyComponent;
  let fixture: ComponentFixture<ApiKeyComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ApiKeyComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(ApiKeyComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
@@ -0,0 +1,45 @@
import {Component, OnInit} from '@angular/core';
import {ServerService} from "../../../../services/server.service";

@Component({
  selector: 'app-api-key',
  templateUrl: './api-key.component.html',
  styleUrls: ['./api-key.component.scss']
})
export class ApiKeyComponent implements OnInit {

  displayedColumns: string[] = ['id', 'key', 'created_at', 'delete'];
  dataSource = [];

  constructor(private serverService: ServerService) { }

  ngOnInit(): void {

    this.serverService.getAPIKeys().subscribe((apiKeys: Array<any>) => {
      this.dataSource = [...apiKeys]

      console.log(this.dataSource)
    })
  }

  deleteAPIKey(elem){
    let idx = this.dataSource.indexOf(elem);
    this.serverService.deleteAPIKey(elem.id).subscribe(x => {
      this.dataSource.splice(idx, 1);
      this.dataSource = [...this.dataSource]
    })
  }

  createAPIKey(){

    this.serverService.addAPIKey().subscribe(key => {
      this.dataSource.push(key)
      this.dataSource = [...this.dataSource]
    })

  }

}
@@ -0,0 +1,3 @@
.user-edit-component{
  padding: 20px;
}
@@ -0,0 +1,3 @@
{
  "language_tabs": [{ "python": "Python" }]
}