I have an app that processes a bitmap with a spherize distortion. You can touch the screen to set the radius of the circle that will contain the distortion. Once the distort button is pressed, a subset bitmap the same size as that radius is created and sent off for processing. Once the subset has been distorted, it is placed back onto the original bitmap as an overlay, using the x, y coordinates from the original touch event.
Everything works except that the last row of pixels (the bottom) of the subset bitmap is not filled with pixel data, so it looks as if there is a black line along the bottom of the subset bitmap. The distortion class uses parallel processing: it checks the hardware at runtime to find out how many processors are available and splits the bitmap across them accordingly. I had some help with the parallelisation and can't work out why the black line is there. The loops look to be in order. Any ideas? Thanks in advance, Matt.
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import android.graphics.Bitmap;
import android.os.Debug;
import android.util.Log;
public class MultiRuntimeProcessorFilter {
private static final String TAG = "mrpf";
private int x = 0;
private Bitmap input = null;
private int radius;
public void createBitmapSections(int nOp, int[] sections){
int processors = nOp;
int jMax = input.getHeight();
int aSectionSize = (int) Math.ceil(jMax/processors);
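// Note: jMax/processors is integer division, so the quotient is already truncated
// before Math.ceil() runs; aSectionSize * processors can therefore end up smaller than jMax.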
Log.e(TAG, "++++++++++ sections size = "+aSectionSize);
int k = 0;
for(int h=0; h<processors+1; h++){
sections[h] = k;
k+= aSectionSize;
}
}// end of createBitmapSections()
@SuppressWarnings("unchecked")
public Bitmap barrel (Bitmap input, float k, int r){
this.radius = r;
this.input = input;
int []arr = new int[input.getWidth()*input.getHeight()];
int nrOfProcessors = Runtime.getRuntime().availableProcessors();
int[] sections = new int[nrOfProcessors+1];
createBitmapSections(nrOfProcessors,sections);
ExecutorService threadPool = Executors.newFixedThreadPool(nrOfProcessors);
for(int g=0; g<sections.length;g++){
Log.e(TAG, "++++++++++ sections= "+sections[g]);
}
// ExecutorService threadPool = Executors.newFixedThreadPool(nrOfProcessors);
Object[] task = new Object[nrOfProcessors];
for(int z = 0; z < nrOfProcessors; z++){
task[z] = (FutureTask<PartialResult>) threadPool.submit(new PartialProcessing(sections[z], sections[z+1] - 1, input, k));
Log.e(TAG, "++++++++++ task"+z+"= "+task[z].toString());
}
PartialResult[] results = new PartialResult[nrOfProcessors];
try{
for(int t = 0; t < nrOfProcessors; t++){
results[t] = ((FutureTask<PartialResult>) task[t]).get();
results[t].fill(arr);
}
}catch(Exception e){
e.printStackTrace();
}
Bitmap dst2 = Bitmap.createBitmap(arr,input.getWidth(),input.getHeight(),input.getConfig());
return dst2;
}//end of barrel()
public class PartialResult {
int startP;
int endP;
int[] storedValues;
public PartialResult(int startp, int endp, Bitmap input){
this.startP = startp;
this.endP = endp;
this.storedValues = new int[input.getWidth()*input.getHeight()];
}
public void addValue(int p, int result) {
storedValues[p] = result;
}
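// fill() copies this section's rows from storedValues into arr. Note that 'x' is a field of
// the enclosing MultiRuntimeProcessorFilter and is never reset, so it keeps counting across
// sections and across successive calls to barrel() on the same instance; each row copies
// 'radius' pixels (the value passed in as r).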
public void fill(int[] arr) {
for (int p = startP; p < endP; p++){
for(int b=0;b<radius;b++,x++)
arr[x] = storedValues[x];
}
Log.e(TAG, "++++++++++ x ="+x);
}
}//end of partialResult
public class PartialProcessing implements Callable<PartialResult> {
int startJ;
int endJ;
private int[] scalar;
private float xscale;
private float yscale;
private float xshift;
private float yshift;
private float thresh = 1;
private int [] s1;
private int [] s2;
private int [] s3;
private int [] s4;
private int [] s;
private Bitmap input;
private float k;
public PartialProcessing(int startj, int endj, Bitmap input, float k) {
this.startJ = startj;
this.endJ = endj;
this.input = input;
this.k = k;
s = new int[4];
scalar = new int[4];
s1 = new int[4];
s2 = new int[4];
s3 = new int[4];
s4 = new int[4];
}
int [] getARGB(Bitmap buf,int x, int y){
int rgb = buf.getPixel(y, x); // Returns by default ARGB.
// int [] scalar = new int[4];
// scalar[0] = (rgb >>> 24) & 0xFF;
scalar[1] = (rgb >>> 16) & 0xFF;
scalar[2] = (rgb >>> 8) & 0xFF;
scalar[3] = (rgb >>> 0) & 0xFF;
return scalar;
}
float getRadialX(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = x+((x-cx)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float getRadialY(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = y+((y-cy)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float calc_shift(float x1,float x2,float cx,float k){
float x3 = (float)(x1+(x2-x1)*0.5);
float res1 = x1+((x1-cx)*k*((x1-cx)*(x1-cx)));
float res3 = x3+((x3-cx)*k*((x3-cx)*(x3-cx)));
if(res1>-thresh && res1 < thresh)
return x1;
if(res3<0){
return calc_shift(x3,x2,cx,k);
}
else{
return calc_shift(x1,x3,cx,k);
}
}
void sampleImage(Bitmap arr, float idx0, float idx1)
{
// s = new int [4];
if(idx0<0 || idx1<0 || idx0>(arr.getHeight()-1) || idx1>(arr.getWidth()-1)){
s[0]=0;
s[1]=0;
s[2]=0;
s[3]=0;
return;
}
float idx0_fl=(float) Math.floor(idx0);
float idx0_cl=(float) Math.ceil(idx0);
float idx1_fl=(float) Math.floor(idx1);
float idx1_cl=(float) Math.ceil(idx1);
s1 = getARGB(arr,(int)idx0_fl,(int)idx1_fl);
s2 = getARGB(arr,(int)idx0_fl,(int)idx1_cl);
s3 = getARGB(arr,(int)idx0_cl,(int)idx1_cl);
s4 = getARGB(arr,(int)idx0_cl,(int)idx1_fl);
float x = idx0 - idx0_fl;
float y = idx1 - idx1_fl;
// s[0]= (int) (s1[0]*(1-x)*(1-y) + s2[0]*(1-x)*y + s3[0]*x*y + s4[0]*x*(1-y));
s[1]= (int) (s1[1]*(1-x)*(1-y) + s2[1]*(1-x)*y + s3[1]*x*y + s4[1]*x*(1-y));
s[2]= (int) (s1[2]*(1-x)*(1-y) + s2[2]*(1-x)*y + s3[2]*x*y + s4[2]*x*(1-y));
s[3]= (int) (s1[3]*(1-x)*(1-y) + s2[3]*(1-x)*y + s3[3]*x*y + s4[3]*x*(1-y));
}
@Override public PartialResult call() {
PartialResult partialResult = new PartialResult(startJ, endJ,input);
float centerX=input.getWidth()/2; //center of distortion
float centerY=input.getHeight()/2;
int width = input.getWidth(); //image bounds
int height = input.getHeight();
xshift = calc_shift(0,centerX-1,centerX,k);
float newcenterX = width-centerX;
float xshift_2 = calc_shift(0,newcenterX-1,newcenterX,k);
yshift = calc_shift(0,centerY-1,centerY,k);
float newcenterY = height-centerY;
float yshift_2 = calc_shift(0,newcenterY-1,newcenterY,k);
xscale = (width-xshift-xshift_2)/width;
yscale = (height-yshift-yshift_2)/height;
int p = startJ*radius;
int origPixel = 0;
int color = 0;
int i;
for (int j = startJ; j < endJ; j++){
for ( i = 0; i < width; i++, p++){
origPixel = input.getPixel(i,j);
float x = getRadialX((float)j,(float)i,centerX,centerY,k);
float y = getRadialY((float)j,(float)i,centerX,centerY,k);
sampleImage(input,x,y);
color = ((s[1]&0x0ff)<<16)|((s[2]&0x0ff)<<8)|(s[3]&0x0ff);
//Log.e(TAG, "radius = "+radius);
if(((i-centerX)*(i-centerX) + (j-centerY)*(j-centerY)) <= radius*(radius/4)){
partialResult.addValue(p, color);
}else{
partialResult.addValue(p, origPixel);
}
}//end of inner for
}//end of outer for
return partialResult;
}//end of call
}// end of partialprocessing
}//end of MultiProcesorFilter
[Update] I'm posting the view class that calls the barrel method. This class takes the touch events and sets the radius of the distortion before the processing happens, so it should give a better picture of how everything is set up before the distortion is applied.
public class TouchView extends View{
private File tempFile;
private byte[] imageArray;
private Bitmap bgr;
private Bitmap crop;
private Bitmap crop2;
private Bitmap overLay;
private Bitmap overLay2;
private Paint pTouch;
private float centreX;
private float centreY;
private float centreA;
private float centreB;
private Boolean xyFound = false;
private Boolean abFound = false;
private int Progress = 1;
private static final String TAG = "*********TouchView";
private Filters f = null;
private Filters f2 = null;
private boolean bothCirclesInPlace = false;
private MultiProcessorFilter mpf;
private MultiProcessorFilter mpf2;
private MultiRuntimeProcessorFilter mrpf;
private MultiRuntimeProcessorFilter mrpf2;
private int radius = 50;
protected boolean isLocked = false;
protected boolean isSaved = false;
protected byte [] data;
private float distance1;
private float distance2;
public TouchView(Context context) {
super(context);
}
public TouchView(Context context, AttributeSet attr) {
super(context,attr);
Log.e(TAG, "++++++++++ inside touchview constructor");
tempFile = new File(Environment.getExternalStorageDirectory().
getAbsolutePath() + "/"+"image.jpeg");
imageArray = new byte[(int)tempFile.length()];
// new Thread(new Runnable() {
// public void run() {
try{
InputStream is = new FileInputStream(tempFile);
BufferedInputStream bis = new BufferedInputStream(is);
DataInputStream dis = new DataInputStream(bis);
int i = 0;
while (dis.available() > 0 ) {
imageArray[i] = dis.readByte();
i++;
}
dis.close();
} catch (Exception e) {
e.printStackTrace();
}
// }
// }).start();
Bitmap bm = BitmapFactory.decodeByteArray(imageArray, 0, imageArray.length);
if(bm == null){
Log.e(TAG, "bm = null");
}else{
Log.e(TAG, "bm = not null");
}
bgr = bm.copy(bm.getConfig(), true);
overLay = null;
overLay2 = null;
bm.recycle();
pTouch = new Paint(Paint.ANTI_ALIAS_FLAG);
// pTouch.setXfermode(new PorterDuffXfermode(Mode.SRC_OUT));
pTouch.setColor(Color.RED);
pTouch.setStyle(Paint.Style.STROKE);
}// end of touchView constructor
public void findCirclePixels(){
//f = new Filters();
// f2 = new Filters();
//mpf = new MultiProcessorFilter();
//mpf2 = new MultiProcessorFilter();
mrpf = new MultiRuntimeProcessorFilter();
mrpf2 = new MultiRuntimeProcessorFilter();
crop = Bitmap.createBitmap(bgr,Math.max((int)centreX-radius,0),Math.max((int)centreY-radius,0),radius*2,radius*2);
crop2 = Bitmap.createBitmap(bgr,Math.max((int)centreA-radius,0),Math.max((int)centreB-radius,0),radius*2,radius*2);
new Thread(new Runnable() {
public void run() {
float prog = (float)Progress/150001;
// final Bitmap bgr3 = f.barrel(crop,prog);
// final Bitmap bgr4 = f2.barrel(crop2,prog);
//final Bitmap bgr3 = mpf.barrel(crop,prog);
// final Bitmap bgr4 = mpf2.barrel(crop2,prog);
final Bitmap bgr3 = mrpf.barrel(crop,prog,radius*2);
final Bitmap bgr4 = mrpf2.barrel(crop2,prog, radius*2);
TouchView.this.post(new Runnable() {
public void run() {
TouchView.this.overLay = bgr3;
TouchView.this.overLay2 = bgr4;
TouchView.this.invalidate();
}
});
}
}).start();
}// end of findCirclePixels()
@Override
public boolean onTouchEvent(MotionEvent ev) {
switch (ev.getAction()) {
case MotionEvent.ACTION_DOWN: {
int w = getResources().getDisplayMetrics().widthPixels;
int h = getResources().getDisplayMetrics().heightPixels;
if(ev.getX() <radius || ev.getX() > w - radius ){
// Log.e(TAG, "touch event is too near width edge!!!!!!!!!!");
showToastMessage("You touched too near the screen edge");
break;
}
if(ev.getY() <radius || ev.getY() > h - radius ){
// Log.e(TAG, "touch event is too near height edge!!!!!!!!!!");
showToastMessage("You touched too near the screen edge");
break;
}
distance1 = (float) Math.sqrt(Math.pow(ev.getX() - centreX, 2.0) + Math.pow(ev.getY() - centreY, 2.0));
distance2 = (float) Math.sqrt(Math.pow(ev.getX() - centreA, 2.0) + Math.pow(ev.getY() - centreB, 2.0));
Log.e(TAG, "dist1 = "+distance1 +" distance2 = " + distance2);
if(isLocked == false){
if(abFound == false){
centreA = (int) ev.getX();
centreB = (int) ev.getY();
abFound = true;
invalidate();
}
if(xyFound == false){
centreX = (int) ev.getX();
centreY = (int) ev.getY();
xyFound = true;
invalidate();
}
if(abFound == true && xyFound == true){
bothCirclesInPlace = true;
}
break;
}
}
case MotionEvent.ACTION_MOVE: {
if(isLocked == false){
/*if(xyFound == false){
centreX = (int) ev.getX()-70;
centreY = (int) ev.getY()-70;
xyFound = true;
}else{
centreA = (int) ev.getX()-70;
centreB = (int) ev.getY()-70;
bothCirclesInPlace = true;
invalidate();
}
*/
if(distance1 < distance2){
centreX = (int) ev.getX();
centreY = (int) ev.getY();
xyFound = true;
invalidate();
}else{
centreA = (int) ev.getX();
centreB = (int) ev.getY();
bothCirclesInPlace = true;
invalidate();
}
break;
}
}
case MotionEvent.ACTION_UP:
break;
}
return true;
}//end of onTouchEvent
public void initSlider(final HorizontalSlider slider)
{
slider.setOnProgressChangeListener(changeListener);
}
private OnProgressChangeListener changeListener = new OnProgressChangeListener() {
@Override
public void onProgressChanged(View v, int progress) {
if(isLocked == true){
setProgress(progress);
}else{
Toast.makeText(TouchView.this.getContext(), "press lock before applying distortion ", Toast.LENGTH_SHORT).show();
}
}
};
@Override
public void onDraw(Canvas canvas){
super.onDraw(canvas);
Log.e(TAG, "******about to draw bgr ");
canvas.drawBitmap(bgr, 0, 0, null);
if(isSaved == false){
if (isLocked == true && bothCirclesInPlace == true){
if(overLay != null)
canvas.drawBitmap(overLay, centreX-radius, centreY-radius, null);
if(overLay2 != null)
canvas.drawBitmap(overLay2, centreA-radius, centreB-radius, null);
}
if(bothCirclesInPlace == true && isLocked == false){
canvas.drawCircle(centreX, centreY, radius,pTouch);
canvas.drawCircle(centreA, centreB, radius,pTouch);
}
}else{
// String mFilePath : Absolute Path of the file to be saved
// Bitmap mBitmap1 : First bitmap. This goes as background.
// Bitmap mCBitmap : Bitmap associated with the Canvas. All draws on the canvas are drawn into this bitmap.
// Bitmap mBitmap2 : Second bitmap. This goes on top of first (in this example serves as foreground.
// Paint mPaint1 : Paint to draw first bitmap
// Paint mPaint2 : Paint to draw second bitmap on top of first bitmap
isSaved = false;
Bitmap mCBitmap = Bitmap.createBitmap(bgr.getWidth(), bgr.getHeight(), bgr.getConfig());
Canvas tCanvas = new Canvas(mCBitmap);
tCanvas.drawBitmap(bgr, 0, 0, null);
if(overLay != null)
tCanvas.drawBitmap(overLay, centreX-radius, centreY-radius, null);
if(overLay2 != null)
tCanvas.drawBitmap(overLay2, centreA-radius, centreB-radius, null);
canvas.drawBitmap(mCBitmap, 0, 0, null);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
mCBitmap.compress(CompressFormat.JPEG, 100 /*ignored for PNG*/, bos);
data = bos.toByteArray();
try {
bos.flush();
bos.close();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
try {
bos.flush();
bos.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if ( data == null){
Log.e(TAG, "data in touchview before save clicked is null");
}else{
Log.e(TAG, "data in touchview before saved clicked is not null");
}
}
}//end of onDraw
protected void setProgress(int progress2) {
Log.e(TAG, "***********in SETPROGRESS");
this.Progress = progress2;
findCirclePixels();
}
public int getRadius() {
return radius;
}
public void setRadius(int r) {
radius = r;
invalidate();
}
public void showToastMessage(String mess){
Toast.makeText(TouchView.this.getContext(), mess.toString(), Toast.LENGTH_SHORT).show();
}
}
Answer 0 (score: 0)
My guess is that when the bottom of the image is processed, the operation works partly on the input image and partly outside it because of the radius used in the barrel method. Edges often cause problems when you operate outside the actual image bounds: the result there is 0, which gives you the black line...
I would suggest you try increasing the size of the image:
@SuppressWarnings("unchecked")
public Bitmap barrel (Bitmap input, float k, int r){
this.radius = r;
this.input = input;
// Add an offset to the width and height equal to the radius
// To avoid performing processing outside the bounds of the input image
int []arr = new int[(input.getWidth() + this.radius) * (input.getHeight() + this.radius)];
// Continue...
Again, this is only my first guess and I don't have time to check it right now, but I would definitely start by investigating the edges; that is my suggestion.
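As an alternative to enlarging the buffer, here is a minimal, untested sketch of clamp-to-edge sampling, assuming the all-zero out-of-bounds branch in sampleImage() is what produces the black pixels (sampleImageClamped is a made-up name; it would sit inside PartialProcessing and reuse the existing sampleImage()):
void sampleImageClamped(Bitmap arr, float idx0, float idx1) {
    // Clamp the sample coordinates to the valid pixel range instead of letting
    // out-of-range samples fall through to the branch that zeroes s[].
    float maxRow = arr.getHeight() - 1;
    float maxCol = arr.getWidth() - 1;
    idx0 = Math.max(0f, Math.min(idx0, maxRow));
    idx1 = Math.max(0f, Math.min(idx1, maxCol));
    // Then run the existing bilinear sampling on the clamped coordinates.
    sampleImage(arr, idx0, idx1);
}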
Answer 1 (score: 0)
BitmapDrawable bmpd = new BitmapDrawable(input);
int[] arr = new int[(bmpd.getIntrinsicWidth() + this.radius) * (bmpd.getIntrinsicHeight() + this.radius)];
Answer 2 (score: 0)
Your problem is most likely a mismatch between the image coordinate system you are assuming and the one the spherize algorithm is using.
See MathWorks' Image Coordinate Systems.
I expect you are treating the input/output images in terms of pixel indices, whereas the spherize algorithm is processing your data in a spatial coordinate system. This typically leaves the outermost border of the processed image blank, because the algorithm has effectively shifted the image up and to the left by 0.5 pixels: a coordinate of 3 in the original system becomes 3.5 in the new system and falls outside the computed range.
This is actually a big issue in 2D-to-3D image-processing algorithms, because the projection between the two spaces is not trivial and small implementation differences cause very visible problems. Note that the pixel-indices coordinate system is 3x3, but the spatial coordinate system is essentially 4x4.
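As a rough illustration of that half-pixel shift (not from the original answer; the helper names are made up), the relation between the two systems can be sketched as:
// A pixel index i addresses a whole pixel; in a pixel-centre ("spatial") system
// the centre of that pixel sits at i + 0.5, which is the 0.5-pixel shift described above.
static float indexToSpatial(int index) {
    return index + 0.5f;
}
static int spatialToIndex(float spatial) {
    return (int) Math.floor(spatial - 0.5f);
}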
Try setting your spherize barrel to width+1/height+1 instead of width/height and see whether that fills in your missing row.