在Windows上:
import re
import subprocess
output = subprocess.run(['python', '-m', 'black'], stderr=subprocess.PIPE)
stderr = output.stderr.decode()
assert re.match(r'No Path provided. Nothing to do \\U0001f634\r\n', stderr)
但是,在Linux上,我需要在两个地方更改正则表达式:
import re
import subprocess
output = subprocess.run(['python', '-m', 'black'], stderr=subprocess.PIPE)
stderr = output.stderr.decode()
assert re.match(r'No Path provided. Nothing to do \U0001f634\n', stderr)
我了解为什么我需要将\r\n
更改为\n
-这是因为carriage return
但是,为什么我需要将'\\U0001f634'
更改为'\U0001f634'
?有没有一种可以跨平台编写上述断言的方法?
答案 0(得分:1):
出现这种差异的原因可能是因为表情符号在Windows中显示为十六进制,在Linux中显示为文字表情符号。
这里的解决方案是在 \r 后面添加 ? 量词,使其成为可选字符,并使用交替(alternation)同时匹配转义形式的表情符号和文字表情符号:
r'No Path provided. Nothing to do (?:\\U0001f634|\U0001f634)\r?\n'
注意,未转义的 . 匹配除换行符以外的任何字符;如果它是文字点,请考虑将其转义:
r'No Path provided\. Nothing to do (?:\\U0001f634|\U0001f634)\r?\n'
No Path provided. Nothing to do
详细信息:
No Path provided\. Nothing to do - 文字文本
(?:\\U0001f634|\U0001f634) - 文字反斜杠后跟 U0001f634,或文字表情符号
\r? - 可选的 CR
private var reader: AVAssetReader?
private var url:URL!
private let queue = DispatchQueue(label: "FileVideoAudioFileReader")
var loop = false
private var videoSampleOutputs:((_ sample:CMSampleBuffer)->())?
private var audioSampleOutputs:((_ sample:CMSampleBuffer)->())?
private var completiton:((_ reader:AVAssetReader?)->())?
private lazy var videoTimmer: CADisplayLink = {
let displayLink = CADisplayLink(target: self, selector: #selector(readVideoSamples))
displayLink.preferredFramesPerSecond = 30
return displayLink
}()
private lazy var audioTimer: CADisplayLink = {
let displayLink = CADisplayLink(target: self, selector: #selector(readAudioSamples))
displayLink.preferredFramesPerSecond = 30
return displayLink
}()
init(url: URL, videoSampleOutputs: ((_ sample:CMSampleBuffer)->())? , audioSampleOutputs:((_ sample:CMSampleBuffer)->())?,completiton:((_ reader:AVAssetReader?)->())?) {
self.videoSampleOutputs = videoSampleOutputs
self.audioSampleOutputs = audioSampleOutputs
self.completiton = completiton
self.url = url
createAssetReader(url: url)
}
private func createAssetReader(url:URL){
let asset = AVAsset(url: url)
reader = try? AVAssetReader(asset: asset)
// Video track
let videoTrack = asset.tracks(withMediaType: .video).first!
let output = AVAssetReaderTrackOutput(track: videoTrack,outputSettings:
[String(kCVPixelBufferPixelFormatTypeKey): NSNumber(value: kCVPixelFormatType_32BGRA)])
output.alwaysCopiesSampleData = true
reader?.add(output)
/*
AVSampleRateKey :44100,
AVNumberOfChannelsKey:2,
AVLinearPCMBitDepthKey:16,
AVLinearPCMIsFloatKey:false,
AVLinearPCMIsNonInterleaved:false,
AVLinearPCMIsBigEndianKey:false
AVSampleRateKey :48000,
*/
if let audioTrack = asset.tracks(withMediaType: .audio).first{
let output = AVAssetReaderTrackOutput(track: audioTrack,outputSettings:
[
AVFormatIDKey:kAudioFormatLinearPCM
]
)
output.alwaysCopiesSampleData = true
reader?.add(output)
}
}
func start() {
queue.async { [unowned self] in
self.reader?.startReading()
self.videoTimmer.add(to: .main, forMode: .default)
self.audioTimer.add(to: .main, forMode: .default)
}
}
func stop() {
queue.async { [unowned self] in
self.stopReading()
}
}
@objc func readAudioSamples() {
queue.async { [unowned self] in
// For Audio
if let buffer = self.reader?.outputs.filter({return $0.mediaType == .audio}).first?.copyNextSampleBuffer(){
self.audioSampleOutputs?(buffer)
}
}
}
@objc func readVideoSamples() {
queue.async { [unowned self] in
// For Video
if let buffer = self.reader?.outputs.filter({return $0.mediaType == .video}).first?.copyNextSampleBuffer() {
// notify all abservers about new sample buffer availability
self.videoSampleOutputs?(buffer)
} else {
print("before stop")
if self.reader?.status == .completed {
if self.loop {
self.reader?.cancelReading()
self.createAssetReader(url: url)
self.reader?.startReading()
} else {
self.stopReading()
}
}else{
// unknown case
fatalError("Intential crash to check if we are reaching here")
//self.stopReading()
}
}
}
}
private func stopReading() {
videoTimmer.invalidate()
reader?.cancelReading()
}
\n - LF 字符。