我真是个新手。我正在尝试创建一个带有进度指示器的按钮,并希望在进度完成时执行一些代码。我使用以下代码以某种方式实现了预期的行为,
React 组件（React component）
class ProgressButton extends Component {
constructor() {
super();
this.state = {
progress: 100,
};
this.progressTimer = null;
}
componentDidMount() {
this.progressTimer = setInterval(() => {
if(this.state.progress === 0) {
clearInterval(this.progressTimer);
// Run code on completion
}
this.setState({progress: this.state.progress - 1})
}, 100)
}
render() {
return (
<button style={{backgroundPosition: this.state.progress + '%'}}>
Hello
</button>
);
}
}
CSS
/* Progress button: the background gradient is twice the button's width
   (black left half, grey right half). Sliding background-position from
   100% down to 0% progressively reveals the black "filled" portion. */
button {
  border: none;
  border-radius: 4px;
  width: 70px;
  height: 30px;
  font-size: 16px;
  color: white;
  background-image: linear-gradient(to right, black 50%, grey 50%);
  background-size: 200% 100%;
  background-position: 100% 0;
}
但是我对此解决方案不满意，因为我不确定使用 setInterval() 是不是个好主意。我曾考虑改用 CSS @keyframes 动画，并配合 setTimeout() 在动画完成时执行代码。但我对此也没有把握，因为两者可能会不同步。
有更好的方法吗?
答案 0 :(得分:1)
朋友 我用动画为您简化了方式
import csv
import base64
import json
import io
import avro.schema
import avro.io
from avro.datafile import DataFileReader, DataFileWriter
import math
import os
import gcloud
from gcloud import storage
from google.cloud import bigquery
from oauth2client.client import GoogleCredentials
from datetime import datetime, timedelta
import numpy as np
# Resolve the credentials location. __file__ is undefined in some
# interactive environments (e.g. a REPL), hence the fallback.
try:
    script_path = os.path.dirname(os.path.abspath(__file__)) + "/"
except NameError:  # FIX: narrowed from a bare `except:` — only __file__ can be missing here
    script_path = "C:\\Users\\me\\Documents\\Keys\\key.json"

# BigQuery credentials and settings.
# NOTE(review): in the try-branch script_path is a *directory*, while the
# fallback is a key *file* — GOOGLE_APPLICATION_CREDENTIALS expects a key
# file path. Verify which one is actually intended.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = script_path

# Yesterday's date as YYYY-MM-DD (used as a folder name).
folder = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
bucket_name = 'gs://new_bucket/table/*.csv'
dataset = 'dataset'
tabela = 'table'

# Avro schema and a writer for the decoded records produced below.
schema = avro.schema.Parse(open("C:\\Users\\me\\schema_table.avsc", "rb").read())
writer = DataFileWriter(open("C:\\Users\\me\\table_register.avro", "wb"), avro.io.DatumWriter(), schema)
def insert_bigquery(target_uri, dataset_id, table_id):
    """Load semicolon-delimited CSV files from GCS into a BigQuery table.

    Blocks until the load job completes.
    """
    client = bigquery.Client()
    table_ref = client.dataset(dataset_id).table(table_id)

    config = bigquery.LoadJobConfig()
    config.schema = [bigquery.SchemaField('id', 'STRING', mode='REQUIRED')]
    config.source_format = bigquery.SourceFormat.CSV
    config.field_delimiter = ";"

    job = client.load_table_from_uri(target_uri, table_ref, job_config=config)
    print('Starting job {}'.format(job.job_id))
    job.result()  # wait for the load to finish
    print('Job finished.')
#insert_bigquery(bucket_name, dataset, tabela)
def get_data_from_bigquery():
    """query bigquery to get data to import to PSQL"""
    client = bigquery.Client()
    # Fetch the ids
    job = client.query("""SELECT id FROM dataset.base64_data""")
    return list(job.result())
# Decode every fetched row and append it to the Avro output file.
rows = get_data_from_bigquery()

# FIX: hoisted out of the loop — the original re-read and re-parsed the
# .avsc schema file on every iteration (O(rows) file reads).
datum_reader = avro.io.DatumReader(
    avro.schema.Parse(open("C:\\Users\\me\\schema_table.avsc").read())
)

for row in rows:  # FIX: iterate directly instead of `for i in range(len(a))`
    raw = base64.b64decode(str(row[0]))  # FIX: renamed from `bytes`, which shadowed the builtin
    # Skip the first 5 bytes before the Avro payload.
    # NOTE(review): looks like a Confluent-style wire-format header
    # (magic byte + 4-byte schema id) — confirm against the producer.
    payload = raw[5:]
    decoder = avro.io.BinaryDecoder(io.BytesIO(payload))
    record = datum_reader.read(decoder)
    writer.append(record)

# Close once, after all rows are appended, so the Avro container is finalized.
writer.close()
def upload_blob(bucket_name, source_file_name, destination_blob_name):
    """Upload a local file to gs://<bucket_name>/insert_transfer/<destination_blob_name>."""
    client = storage.Client()
    target = client.get_bucket(bucket_name).blob("insert_transfer/" + destination_blob_name)
    target.upload_from_filename(source_file_name)
    print('File {} uploaded to {}'.format(
        source_file_name,
        destination_blob_name
    ))
upload_blob('new_bucket', 'C:\\Users\\me\\table_register.avro', 'table_register.avro')
为您
import { Animated} from 'react-native';
//in constructor
this.state = {
progress: new Animated.Value(0);
};
//in componentDidMount
Animated.timing(this.state.progress, {
toValue: 100,//That you are interested
duration: 500
}).start()
//in render
const animator = this.state.progress.interpolate({
inputRange: [0, 100],
outputRange: ['1%', '100%'],
});
//use animator in style
<Animated.View style={{height:animator}}/>
答案 1 :(得分:0)