My data's date format is yyyy-MM-dd, e.g. "2015-10-12".
My Logstash date filter is part of the configuration shown below.
However, my Elasticsearch instance cannot parse the date, and I get the error shown after the configuration.
input {
  file {
    path => "/etc/logstash/immport.csv"
    codec => multiline {
      pattern => "^S*"
      negate => true
      what => "previous"
    }
    start_position => "beginning"
  }
}
filter {
  csv {
    separator => ","
    autodetect_column_names => true
    skip_empty_columns => true
  }
  date {
    match => ["start_date", "yyyy-MM-dd"]
    target => "start_date"
  }
  mutate {
    rename => {"start_date" => "[study][startDate]"}
  }
}
output {
  elasticsearch {
    action => "index"
    hosts => ["elasticsearch-5-6:9200"]
    index => "immport12"
    document_type => "dataset"
    template => "/etc/logstash/immport-mapping.json"
    template_name => "mapping_template"
    template_overwrite => true
  }
  stdout { codec => rubydebug }
}
Sample data row and the resulting error:

[logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"immport_2017_12_02", :_type=>"dataset", :_routing=>nil}, 2017-12-20T08:55:45.367Z 878192e51991 SDY816,HEPSV_COHORT: Received Heplisav,,,,2012-04-17,10.0,Systems Biology Analysis of participants' response to the licensed Hepatitis B Vaccine (HEPLISAV) in specific cell subsets (see companion studies SDY299 and SDY690),Interventional,http://www.immport.org/immport-open/public/study/study/displayStudyDetail/SDY816,,Interventional,Vaccine Response,Homo sapiens,Cell,DNA microarray], :response=>{"index"=>{"_index"=>"immport_2017_12_02", "_type"=>"dataset", "_id"=>"AWBzIsBPov62ZQtaldxQ", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse [study.startDate]", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"2012-04-17T00:00:00.000Z\" is malformed at \"T00:00:00.000Z\""}}}}}
I want Logstash to output the date in yyyy-MM-dd format, without a timestamp, so that it matches my mapping template. The key part of the error is:
"error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse [study.startDate]", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"2012-04-17T00:00:00.000Z\" is malformed at \"T00:00:00.000Z\""}}}}}
Answer 0 (score: 2)
I tried this on my machine, taking your Logstash conf file as a reference, and it works fine.
My Logstash configuration file:
input {
  file {
    path => "D:\testdata\stack.csv"
    codec => multiline {
      pattern => "^S*"
      negate => true
      what => "previous"
    }
    start_position => "beginning"
  }
}
filter {
  csv {
    separator => ","
    autodetect_column_names => true
    skip_empty_columns => true
  }
  date {
    match => ["dob", "yyyy-MM-dd"]
    target => "dob"
  }
  mutate {
    rename => {"dob" => "[study][dob]"}
  }
}
output {
  elasticsearch {
    action => "index"
    hosts => ["localhost:9200"]
    index => "stack"
  }
  stdout { codec => rubydebug }
}
CSV file:
id,name,rollno,dob,age,gender,comments
1,hatim,88,1992-07-30,25,male,qsdsdadasd asdas das dasd asd asd asd as dd sa d
2,hatim,89,1992-07-30,25,male,qsdsdadasd asdas das dasd asd asd asd as dd sa d
Elasticsearch document after indexing:
{
  "_index": "stack",
  "_type": "doc",
  "_id": "wuBTeGABQ7gwBQSQTX1q",
  "_score": 1,
  "_source": {
    "path": """D:\testdata\stack.csv""",
    "study": {
      "dob": "1992-07-29T18:30:00.000Z"
    },
    "@timestamp": "2017-12-21T09:06:52.465Z",
    "comments": "qsdsdadasd asdas das dasd asd asd asd as dd sa d",
    "gender": "male",
    "@version": "1",
    "host": "INMUCHPC03284",
    "name": "hatim",
    "rollno": "88",
    "id": "1",
    "message": "1,hatim,88,1992-07-30,25,male,qsdsdadasd asdas das dasd asd asd asd as dd sa d\r",
    "age": "25"
  }
}
Everything works perfectly. See whether this example helps you.
Answer 1 (score: 1)
The problem was that I had changed the Logstash mapping template name to a new name without deleting the old template, so the index was still pointing at the old template.
After deleting the old template:
curl -XDELETE 'http://localhost:9200/_template/test_template'
It worked. So whenever we switch to a new template, we need to delete the old one first and then reprocess the records.
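To double-check which templates Elasticsearch is actually holding before and after the delete, the template endpoints can be queried; for example (mapping_template is the template_name from the configuration in the question):

curl -XGET 'http://localhost:9200/_template?pretty'
curl -XGET 'http://localhost:9200/_template/mapping_template?pretty'

The first call lists every installed template, and the second shows the one the pipeline should be using, which makes it easy to confirm the stale template is gone before reprocessing the records.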