% This file was created with JabRef 2.6.
% Encoding: MacRoman
@CONFERENCE{herrmann04cdm,
abstract = {Predictive multiple suppression methods consist of two main steps:
a prediction step, in which multiples are predicted from the seismic
data, and a subtraction step, in which the predicted multiples are
matched with the true multiples in the data. The last step appears
  crucial in practice: an incorrect adaptive subtraction method will
  cause multiples to be sub-optimally subtracted or primaries to be
  distorted, or both. Therefore, we propose a new domain for separation
of primaries and multiples via the Curvelet transform. This transform
maps the data into almost orthogonal localized events with a directional
and spatial-temporal component. The multiples are suppressed by thresholding
the input data at those Curvelet components where the predicted multiples
have large amplitudes. In this way the more traditional filtering
of predicted multiples to fit the input data is avoided. An initial
field data example shows a considerable improvement in multiple suppression.
{\copyright}2004 Society of Exploration Geophysicists},
author = {Felix J. Herrmann and D. J. Verschuur},
booktitle = {SEG Technical Program Expanded Abstracts},
doi = {10.1190/1.1851110},
keywords = {SLIM},
number = {1},
organization = {SEG},
pages = {1333-1336},
presentation = {http://slim.eos.ubc.ca/Publications/Public/Presentations/seg/seg04/herrmann04cdm.pdf
},
publisher = {SEG},
title = {Curvelet-domain multiple elimination with sparseness constraints},
url = {http://slim.eos.ubc.ca/~felix/public/SEGM2004.pdf},
volume = {23},
  year = {2004}
}
@CONFERENCE{aravkin11eage,
author = {Aleksandr Y. Aravkin and James V. Burke and Felix J. Herrmann and
Tristan van Leeuwen},
title = {A Nonlinear Sparsity Promoting Formulation and Algorithm for Full
Waveform Inversion},
year = {2011},
organization = {EAGE},
publisher = {EAGE},
abstract = {Full Waveform Inversion (FWI) is a computational procedure to extract
medium parameters from seismic data. FWI is typically formulated
as a nonlinear least squares optimization problem, and various regularization
techniques are used to guide the optimization because the problem
  is ill-posed. In this paper, we propose a novel sparse regularization
which exploits the ability of curvelets to efficiently represent
geophysical images. We then formulate a corresponding sparsity promoting
constrained optimization problem, which we call Nonlinear Basis Pursuit
Denoise (NBPDN) and present an algorithm to solve this problem to
recover medium parameters. The utility of the NBPDN formulation and
efficacy of the algorithm are demonstrated on a stylized cross-well
  experiment, where a sparse velocity perturbation is recovered with
higher quality than the standard FWI formulation (solved with LBFGS).
The NBPDN formulation and algorithm can recover the sparse perturbation
even when the data volume is compressed to 5 percent of the original
size using random superposition.},
file = {AravkinEAGE2011submit.pdf:http\://slim.eos.ubc.ca/Publications/Public/Conferences/EAGE/2011/AravkinEAGE2011submit.pdf:PDF},
keywords = {EAGE},
  presentation = {http://slim.eos.ubc.ca/Publications/Public/Presentations/eage/eage11/aravkin2011eage.pdf
},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/EAGE/2011/AravkinEAGE2011submit.pdf}
}
@CONFERENCE{AA11fwi,
author = {Aleksandr Y. Aravkin and Felix J. Herrmann and Tristan van Leeuwen
and James V. Burke and Xiang Li},
title = {Full Waveform Inversion with Compressive Updates},
year = {2011},
organization = {SIAM CS\&E 2011},
publisher = {SIAM CS\&E 2011},
abstract = {Full-waveform inversion relies on large multi-experiment data volumes.
While improvements in acquisition and inversion have been extremely
successful, the current push for higher quality models reveals fundamental
shortcomings handling increasing problem sizes numerically. To address
this fundamental issue, we propose a randomized dimensionality-reduction
strategy motivated by recent developments in stochastic optimization
and compressive sensing. In this formulation conventional Gauss-Newton
iterations are replaced by dimensionality-reduced sparse recovery
problems with source encodings.},
presentation = {http://slim/Publications/Public/Presentations/2011/Aravkin2.28.2011.pdf}
}
@CONFERENCE{AA11nbpdn,
author = {Aleksandr Y. Aravkin and Tristan van Leeuwen and James Burke and
Felix J. Herrmann},
title = {Sparsity promoting formulations and algorithms for FWI. Presented
at AMP Medical and Seismic Imaging, 2011, Vancouver BC.},
year = {2011},
organization = {ICIAM 2011},
abstract = {Full Waveform Inversion (FWI) is a computational procedure to extract
medium parameters from seismic data. FWI is typically formulated
as a nonlinear least squares optimization problem, and various regularization
techniques are used to guide the optimization because the problem
is ill-posed. We propose a novel sparse regularization which exploits
the ability of curvelets to efficiently represent geophysical images.
We then formulate a corresponding sparsity promoting constrained
optimization problem, which we solve using an open source algorithm.
The techniques are applicable to any inverse problem where sparsity
modeling is appropriate.
We demonstrate the efficacy of the formulation on a toy example (stylized
  cross-well experiment) and on a realistic seismic example (partial
  Marmousi model). We also discuss the tradeoff between model fit
and sparsity promotion, with a view to extend existing techniques
for linear inverse problems to the case where the forward model is
nonlinear. },
date-added = {2011-07-15},
presentation = {http://slim.eos.ubc.ca/Publications/Public/Presentations/2011/aravkin2011AMP.pdf},
url = {http://slim/Publications/Public/Presentations/2011/aravkin2011AMP.pdf}
}
@CONFERENCE{AA11robustfwi,
author = {Aleksandr Y. Aravkin and Tristan van Leeuwen and Felix J. Herrmann},
  title = {Robust FWI using Student's t-distribution. Presented at Waves 2011,
  Vancouver BC.},
year = {2011},
organization = {ICIAM 2011},
abstract = {Iterative inversion algorithms require repeated simulation of 3D time-dependent
acoustic, elastic, or electromagnetic wave fields, extending hundreds
of wavelengths and hundreds of periods. Also, seismic data is rich
in information at every representable scale. Thus simulation-driven
optimization approaches to inversion impose great demands on simulator
efficiency and accuracy. While computer hardware advances have been
of critical importance in bringing inversion closer to practical
application, algorithmic advances in simulator methodology have been
equally important. Speakers in this two-part session will address
a variety of numerical issues arising in the wave simulation, and
in its application to inversion. },
date-added = {2011-07-20},
presentation = {http://slim.eos.ubc.ca/Publications/Public/Presentations/2011/aravkin2011WAVES.pdf},
url = {http://slim/Publications/Public/Presentations/2011/aravkin2011WAVES.pdf}
}
@CONFERENCE{aravkin11robust,
author = {Aleksandr Y. Aravkin and Tristan van Leeuwen and Felix J. Herrmann},
title = {Robust full-waveform inversion using the Student's t-distribution},
year = {2011},
organization = {SEG},
publisher = {SEG},
abstract = {Full-waveform inversion (FWI) is a computational procedure to extract
  medium parameters from seismic data. Robust methods for FWI are
needed to overcome sensitivity to noise and in cases where modeling
is particularly poor or far from the real data generating process.
We survey previous robust methods from a statistical perspective,
and use this perspective to derive a new robust method by assuming
the random errors in our model arise from the Student's t-distribution.
We show that in contrast to previous robust methods, the new method
  progressively down-weighs large outliers, effectively ignoring
them once they are large enough. This suggests that the new method
is more robust and suitable for situations with very poor data quality
or modeling. Experiments show that the new method recovers as well
or better than previous robust methods, and can recover models with
  quality comparable to standard methods on noise-free data when
some of the data is completely corrupted, and even when a marine
acquisition mask is entirely ignored in the modeling. The ability
  to ignore a marine acquisition mask via robust FWI methods offers
an opportunity for stochastic optimization methods in marine acquisition.},
keywords = {SEG},
timestamp = {2011-04-06 15:00:00 -0700},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/2011/aravkin2011seg.pdf}
}
@CONFERENCE{Berg08sat,
author = {E. van den Berg},
title = {Sparco: A testing framework for sparse reconstruction},
booktitle = {SINBAD 2008},
year = {2008},
abstract = {Sparco is a framework for testing and benchmarking algorithms for
sparse reconstruction. It includes a large collection of sparse reconstruction
problems drawn from the imaging, compressed sensing, and geophysics
literature. Sparco is also a framework for implementing new test
problems and can be used as a tool for reproducible research. We
describe the software environment, and demonstrate its usefulness
for testing and comparing solvers for sparse reconstruction.},
date-modified = {2008-08-22 12:54:25 -0700},
keywords = {SLIM, SINBAD, Presentation},
url = {http://slim.eos.ubc.ca/SINBAD2008/Program_files/SINBAD2008_Ewout_Spar.pdf}
}
@CONFERENCE{vandenberg08esr,
author = {E. van den Berg},
title = {Exact sparse reconstruction and neighbourly polytopes},
booktitle = {IAM},
year = {2008},
bdsk-url-1 = {http://slim.eos.ubc.ca/Publications/Private/Presentations/2008/vandenberg08iam_pres.pdf},
date-added = {2008-08-26 15:44:44 -0700},
date-modified = {2008-08-26 15:45:58 -0700},
keywords = {SLIM, IAM, Presentation},
presentation = {http://slim.eos.ubc.ca/Publications/Private/Presentations/2008/vandenberg08iam_pres.pdf}
}
@CONFERENCE{friedlander09bsem,
author = {E. van den Berg and Michael P. Friedlander},
title = {Spot: A linear-operator toolbox for Matlab},
booktitle = {SCAIM Seminar},
year = {2009},
address = {University of British Columbia},
keywords = {minimization, Presentation, SLIM}
}
@CONFERENCE{vandenberg07ipo1,
author = {E. van den Berg and Michael P. Friedlander},
title = {In Pursuit of a Root},
booktitle = {2007 Von Neumann Symposium},
year = {2007},
keywords = {minimization, Presentation, SLIM},
url = {http://www.cs.ubc.ca/~mpf/public/mpf07ams.pdf}
}
@CONFERENCE{vandenberg08ocf,
author = {E. van den Berg and Mark Schmidt and Michael P. Friedlander and K.
Murphy},
title = {Optimizing Costly Functions with Simple Constraints: A Limited-Memory
Projected Quasi-Newton Algorithm},
year = {2009},
volume = {12},
series = {Twelfth International Conference on Artificial Intelligence and Statistics},
month = {April},
abstract = {An optimization algorithm for minimizing a smooth function over a
convex set is described. Each iteration of the method computes a
descent direction by minimizing, over the original constraints, a
diagonal-plus low-rank quadratic approximation to the function. The
quadratic approximation is constructed using a limited-memory quasi-Newton
update. The method is suitable for large-scale problems where evaluation
  of the function is substantially more expensive than projection
  onto the constraint set. Numerical experiments on one-norm regularized
  test problems indicate that the proposed method is competitive with
  state-of-the-art methods such as bound-constrained L-BFGS and orthant-wise
descent. We further show that the method generalizes to a wide class
of problems, and substantially improves on state-of-the-art methods
  for problems such as learning the structure of Gaussian graphical
  models (involving positive-definite matrix constraints) and Markov
  random fields (involving second-order cone constraints).},
date-added = {2009-01-29 17:16:34 -0800},
date-modified = {2009-01-29 17:16:34 -0800},
keywords = {SLIM},
pdf = {http://www.cs.ubc.ca/~mpf/public/group.pdf}
}
@CONFERENCE{beyreuther05cot,
author = {Moritz Beyreuther and J. Cristall and Felix J. Herrmann},
title = {Computation of time-lapse differences with {3-D} directional frames},
booktitle = {SEG Technical Program Expanded Abstracts},
year = {2005},
volume = {24},
number = {1},
pages = {2488-2491},
organization = {SEG},
publisher = {SEG},
abstract = {We present an alternative method of extracting production related
differences from time-lapse seismic data sets. Our method is not
based on the actual subtraction of the two data sets, risking the
enhancement of noise and introduction of artifacts due to local phase
rotation and slightly misaligned events. Rather, it mutes events
of the monitor survey with respect to the baseline survey based on
the magnitudes of coefficients in a sparse and local atomic decomposition.
Our technique is demonstrated to be an effective tool for enhancing
the time-lapse signal from surveys which have been cross-equalized.
{\copyright}2005 Society of Exploration Geophysicists},
doi = {10.1190/1.2148227},
file = {1:http\://link.aip.org/link/?SGA/24/2488/1:PDF},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/seg4D2005.pdf}
}
@CONFERENCE{beyreuther04cdo,
author = {Moritz Beyreuther and Felix J. Herrmann and J. Cristall},
title = {Curvelet denoising of 4-D seismic},
booktitle = {EAGE Technical Program Expanded Abstracts},
year = {2004},
month = {June},
organization = {EAGE},
publisher = {EAGE},
abstract = {With burgeoning world demand and a limited rate of discovery of new
reserves, there is increasing impetus upon the industry to optimize
recovery from already existing fields. 4D, or time-lapse, seismic
imaging is an emerging technology that holds great promise to better
monitor and optimise reservoir production. The basic idea behind
4D seismic is that when multiple 3D surveys are acquired at separate
calendar times over a producing field, the reservoir geology will
not change from survey to survey but the state of the reservoir fluids
will change. Thus, taking the difference between two 3D surveys should
remove the static geologic contribution to the data and isolate the
  time-varying fluid flow component. However, a major challenge in
4D seismic is that acquisition and processing differences between
3D surveys often overshadow the changes caused by fluid flow. This
problem is compounded when 4D effects are sought to be derived from
vintage 3D data sets that were not originally acquired with 4D in
mind. The goal of this study is to remove the acquisition and imaging
artefacts from a 4D seismic difference cube using Curvelet processing
techniques.},
file = {EAGE4D2004.pdf:http\://slim.eos.ubc.ca/~felix/public/EAGE4D2004.pdf:PDF},
keywords = {SLIM},
presentation = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/2008/yarham08seg.pdf
},
url = {http://slim.eos.ubc.ca/~felix/public/Herrmann_F_Curvelet_imaging.doc}
}
@CONFERENCE{challa07srf,
author = {Sastry S. Challa and Gilles Hennenfent and Felix J. Herrmann},
title = {Signal reconstruction from incomplete and misplaced measurements},
booktitle = {EAGE Technical Program Expanded Abstracts},
year = {2007},
month = {June},
organization = {EAGE},
publisher = {EAGE},
abstract = {Constrained by practical and economical considerations, one often
uses seismic data with missing traces. The use of such data results
in image artifacts and poor spatial resolution. Sometimes due to
practical limitations, measurements may be available on a perturbed
grid, instead of on the designated grid. Due to algorithmic requirements,
when such measurements are viewed as those on the designated grid,
the recovery procedures may result in additional artifacts. This
  paper interpolates incomplete data onto a regular grid via the Fourier
  domain, using a recently developed greedy algorithm. The basic objective
  is to study experimentally how large the perturbation in measurement
  coordinates can be while still allowing the measurements on the
  perturbed grid to be treated as being on the designated grid for faithful
  recovery. Our experimental work shows that for compressible signals,
  a uniformly distributed perturbation can be offset with slightly
  more measurements.},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/EAGE/2007/sastry07.pdf}
}
@CONFERENCE{cristall04cpa,
author = {J. Cristall and Moritz Beyreuther and Felix J. Herrmann},
title = {Curvelet processing and imaging: 4-D adaptive subtraction},
booktitle = {CSEG Technical Program Expanded Abstracts},
year = {2004},
month = {May},
organization = {CSEG},
publisher = {CSEG},
abstract = {With burgeoning world demand and a limited rate of discovery of new
reserves, there is increasing impetus upon the industry to optimize
recovery from already existing fields. 4D, or time-lapse, seismic
imaging holds great promise to better monitor and optimise reservoir
production. The basic idea behind 4D seismic is that when multiple
3D surveys are acquired at separate calendar times over a producing
field, the reservoir geology will not change from survey to survey
but the state of the reservoir fluids will change. Thus, taking the
difference between two 3D surveys should remove the static geologic
contribution to the data and isolate the time-varying fluid flow
component. However, a major challenge in 4D seismic is that acquisition
and processing differences between 3D surveys often overshadow the
changes caused by fluid flow. This problem is compounded when 4D
effects are sought to be derived from legacy 3D data sets that were
not originally acquired with 4D in mind. The goal of this study is
to remove the acquisition and imaging artefacts from a 4D seismic
difference cube using Curvelet processing techniques.},
keywords = {SLIM},
url = {http://www.cseg.ca/conventions/abstracts/2004/2004abstracts/059S0201-Cristall_J_Curvelet_4D.pdf}
}
@CONFERENCE{erlangga09fwi,
author = {Yogi A. Erlangga and Felix J. Herrmann},
title = {Full-Waveform Inversion with Gauss-Newton-Krylov Method},
booktitle = {SEG},
year = {2009},
abstract = {This abstract discusses an implicit implementation of the Gauss-Newton
method, used for the frequency-domain full-waveform inversion, where
the inverse of the Hessian for the update is never formed explicitly.
Instead, the inverse of the Hessian is computed approximately by
a conjugate gradient (CG) method, which only requires the action
  of the Hessian on the CG search direction. This procedure avoids
  the excessive computer storage usually needed for storing the Hessian,
at the expense of extra computational work in CG. An effective preconditioner
for the Hessian is important to improve the convergence of CG, and
hence to reduce the overall computational work.},
keywords = {Presentation, SEG, SLIM},
presentation = {http://slim.eos.ubc.ca/Publications/public/presentations/seg/seg09/erlangga09segfwi.pdf}
}
@CONFERENCE{Erlangga09mwi,
author = {Yogi A. Erlangga and Felix J. Herrmann},
title = {Migration with implicit solvers for the time-harmonic Helmholtz equation},
year = {2009},
organization = {EAGE},
publisher = {EAGE},
abstract = {From the measured seismic data, the location and the amplitude of
reflectors can be determined via a migration algorithm. Classically,
following Claerbout{\textquoteright}s imaging principle [2], a reflector
is located at the position where the source{\textquoteright}s forward-propagated
wavefield correlates with the backward-propagated wavefield of the
receiver data. Lailly and Tarantola later showed that this imaging
principle is an instance of inverse problems, with the associated
migration operator formulated via a least-squares functional; see
[6, 12, 13]. Furthermore, they showed that the migrated image is
associated with the gradient of this functional with respect to the
image. If the solution of the least-squares functional is done iteratively,
the correlation-based image coincides up to a constant with the first
iteration of a gradient method. In practice, this migration is done
either in the time domain or in the frequency domain. In the frequency-domain
  migration, the main bottleneck thus far, which has prevented its full application
  to large-scale problems, is the lack of efficient solvers for computing
wavefields. Robust direct methods easily run into excessive memory
requirements as the size of the problem increases. On the other hand,
iterative methods, which are less demanding in terms of memory, suffered
from lack of convergence. During the past years, however, progress
has been made in the development of an efficient iterative method
[4, 3] for the frequency-domain wavefield computations. In this paper,
we will show the significance of this method (called MKMG) in the
context of the frequency-domain migration, where multi-shot-frequency
  wavefields (on the order of 10,000 related wavefields) need to be computed.},
keywords = {EAGE migration},
presentation = {http://slim.eos.ubc.ca/Publications/Public/Presentations/2009/erlanggaEAGE2009.pdf
},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/EAGE/2009/erlangga2009.pdf}
}
@CONFERENCE{erlangga09swi,
author = {Yogi A. Erlangga and Felix J. Herrmann},
title = {Seismic waveform inversion with Gauss-Newton-Krylov method},
booktitle = {SEG Technical Program Expanded Abstracts},
year = {2009},
volume = {28},
number = {1},
pages = {2357-2361},
organization = {SEG},
publisher = {SEG},
abstract = {This abstract discusses an implicit implementation of the Gauss-Newton
method, used for the frequency-domain full-waveform inversion, where
the inverse of the Hessian for the update is never formed explicitly.
Instead, the inverse of the Hessian is computed approximately by
a conjugate gradient (CG) method, which only requires the action
  of the Hessian on the CG search direction. This procedure avoids
  the excessive computer storage usually needed for storing the Hessian,
at the expense of extra computational work in CG. An effective preconditioner
for the Hessian is important to improve the convergence of CG, and
hence to reduce the overall computational work.},
keywords = {SEG},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/2009/erlangga09segswi.pdf}
}
@CONFERENCE{erlangga08aim,
author = {Yogi A. Erlangga and Felix J. Herrmann},
title = {An iterative multilevel method for computing wavefields in frequency-domain
seismic inversion},
booktitle = {SEG Technical Program Expanded Abstracts},
year = {2008},
volume = {27},
number = {1},
pages = {1957-1960},
month = {November},
organization = {SEG},
publisher = {SEG},
abstract = {We describe an iterative multilevel method for solving linear systems
representing forward modeling and back propagation of wavefields
in frequency-domain seismic inversions. The workhorse of the method
is the so-called multilevel Krylov method, applied to a multigrid-preconditioned
linear system, and is called multigrid-multilevel Krylov (MKMG) method.
Numerical experiments are presented for 2D Marmousi synthetic model
for a range of frequencies. The convergence of the method is fast,
and depends only mildly on frequency. The method can be considered
as the first viable alternative to LU factorization, which is practically
prohibitive for 3D seismic inversions.},
keywords = {SLIM},
presentation = {http://slim.eos.ubc.ca/Publications/Public/Presentations/seg/seg08/erlangga08imm.pdf
},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/2008/erlangga08seg.pdf}
}
@CONFERENCE{erlangga08imf,
author = {Yogi A. Erlangga and K. Vuik and K. Oosterlee and D. Riyanti and
R. Nabben},
title = {Iterative methods for 2D/3D Helmholtz operator},
booktitle = {SINBAD 2008},
year = {2008},
abstract = {We present an iterative method for solving the 2D/3D Helmholtz equation.
The method is mainly based on a Krylov method, preconditioned by
a special operator which represents a damped Helmholtz operator.
The discretization of the preconditioning operator is then solved
by one multigrid sweep. It can be shown that while the spectrum is
bounded above by one, the smallest eigenvalue of the preconditioned
system is of order $k^{-1}$. In this situation, the convergence of
a Krylov method will be proportional to the frequency of the problem.
Further convergence acceleration can be achieved if eigenvalues of
order $k^{-1}$ are projected from the spectrum. This can be done
by a projection operator, similar to but more stable than deflation.
This projection operator has been the core of a new multilevel method,
called multilevel Krylov method, proposed by Erlangga and Nabben
only recently. Putting the preconditioned Helmholtz operator in this
setting, a convergence which is independent of frequency can be obtained.},
keywords = {Presentation, SINBAD, SLIM},
url = {http://slim.eos.ubc.ca/SINBAD2008/Program_files/SINBAD2008_Erlangga_Ite.pdf}
}
@CONFERENCE{eso08ira,
author = {R. A. Eso and S. Napier and Felix J. Herrmann and D. W. Oldenburg},
title = {Iterative reconstruction algorithm for non-linear operators},
booktitle = {SEG Technical Program Expanded Abstracts},
year = {2008},
volume = {27},
number = {1},
pages = {579-583},
month = {November},
organization = {SEG},
publisher = {SEG},
  abstract = {Iterative soft thresholding of a model's wavelet coefficients can be
used to obtain models that are sparse with respect to a known basis
function. We generate sparse models for non-linear forward operators
by applying the soft thresholding operator to the model obtained
through a Gauss-Newton iteration and apply the technique in a synthetic
2.5D DC resistivity crosswell tomographic example.},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/2008/eso08seg.pdf}
}
@CONFERENCE{friedlander09a,
author = {Michael P. Friedlander},
title = {Computing sparse and group-sparse approximations},
booktitle = {2009 High Performance Scientific Computing Conference},
year = {2009},
address = {Hanoi, Vietnam},
keywords = {minimization, Presentation, SLIM}
}
@CONFERENCE{friedlander09b,
author = {Michael P. Friedlander},
title = {Algorithms for large-scale sparse reconstruction},
  booktitle = {IEMS Colloquium Speaker},
year = {2009},
address = {Northwestern University},
keywords = {minimization, Presentation, SLIM}
}
@CONFERENCE{friedlander08afl,
author = {Michael P. Friedlander},
title = {Algorithms for Large-Scale Sparse Reconstruction},
booktitle = {SINBAD 2008},
year = {2008},
abstract = {Many signal processing applications seek to approximate a signal as
a linear combination of only a few elementary atoms drawn from a
large collection. This is known as sparse reconstruction, and the
theory of compressed sensing allows us to pose it as a structured
convex optimization problem. I will discuss the role of duality in
revealing some unexpected and useful properties of these problems,
and will show how they can lead to practical, large-scale algorithms.
I will also describe some applications of these algorithms.},
keywords = {Presentation, SINBAD, SLIM},
url = {http://slim.eos.ubc.ca/SINBAD2008/Program_files/SINBAD2008_Michael_Alg.pdf}
}
@CONFERENCE{friedlander08asa,
author = {Michael P. Friedlander},
title = {Active-set Approaches to Basis Pursuit Denoising},
booktitle = {SIAM Optimization},
year = {2008},
month = {May},
organization = {SIAM Optimization},
publisher = {SIAM Optimization},
file = {:http\://www.cs.ubc.ca/~mpf/public/mpf08siopt.pdf:PDF},
keywords = {Presentation, SLIM},
url = {http://www.cs.ubc.ca/~mpf/public/mpf08siopt.pdf}
}
@CONFERENCE{friedlander08asm,
author = {Michael P. Friedlander and M. A. Saunders},
title = {Active-set methods for basis pursuit},
  booktitle = {West Coast Optimization Meeting (WCOM)},
year = {2008},
month = {September},
abstract = {Many imaging and compressed sensing applications seek sparse solutions
to large under-determined least-squares problems. The basis pursuit
(BP) approach minimizes the 1-norm of the solution, and the BP denoising
(BPDN) approach balances it against the least-squares fit. The duals
of these problems are conventional linear and quadratic programs.
We introduce a modified parameterization of the BPDN problem and
explore the effectiveness of active-set methods for solving its dual.
Our basic algorithm for the BP dual unifies several existing algorithms
and is applicable to large-scale examples.},
file = {:http\://www.cs.ubc.ca/~mpf/public/mpf08siopt.pdf:PDF},
url = {http://www.cs.ubc.ca/~mpf/public/mpf08siopt.pdf}
}
@CONFERENCE{frijlink10cos,
author = {M.O. Frijlink and Reza Shahidi and Felix J. Herrmann and R.G. van
Borselen},
title = {Comparison of Standard Adaptive Subtraction and Primary-multiple
Separation in the Curvelet Domain},
year = {2010},
organization = {EAGE},
publisher = {EAGE},
abstract = {In recent years, data-driven multiple prediction methods and wavefield
extrapolation methods have proven to be powerful methods to attenuate
multiples from data acquired in complex 3-D geologic environments.
  These methods make use of a two-stage approach, where first the multiples
  (surface-related and/or internal) are predicted and then adaptively
  subtracted from the original input data.
The quality of these predicted multiples often raises high expectations
for the adaptive subtraction techniques, but for various reasons
these expectations are not always met in practice. Standard adaptive
subtraction methods use the well-known minimum energy criterion,
stating that the total energy after optimal multiple attenuation
  should be minimal. When primaries and multiples interfere, the minimum
energy criterion is no longer appropriate. Also, when multiples of
different orders interfere, adaptive energy minimization will lead
  to a compromise between different amplitude corrections for the
different orders of multiples. This paper investigates the performance
of two multiple subtraction schemes for a real data set that exhibits
both interference problems. Results from an adaptive subtraction
in the real curvelet domain, separating primaries and multiples,
are compared to those obtained using a more conventional adaptive
subtraction method in the spatial domain.},
file = {frijlink10csa.pdf:http\://slim.eos.ubc.ca/Publications/Public/Conferences/EAGE/2010/frijlink10csa.pdf:PDF},
keywords = {EAGE},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/EAGE/2010/frijlink10csa.pdf}
}
@CONFERENCE{hennenfent08nii2,
author = {Gilles Hennenfent},
  title = {New insights into one-norm solvers from the Pareto curve},
  booktitle = {SINBAD 2008},
  year = {2008},
abstract = {Several geophysical ill-posed inverse problems are successfully solved
by promoting sparsity using one-norm regularization. The practicality
of this approach depends on the effectiveness of the one-norm solver
  used and on its robustness under a limited number of iterations. We
propose an approach to understand the behavior and evaluate the performance
of one-norm solvers. The technique consists of tracking on a graph
the data misfit versus the one norm of successive iterates. By comparing
the solution paths to the Pareto curve, we are able to assess the
performance of the solvers and the quality of the solutions. Such
an assessment is particularly relevant given the renewed interest
in one-norm regularization.},
keywords = {Presentation, SINBAD, SLIM},
url = {http://slim.eos.ubc.ca/SINBAD2008/Program_files/SINBAD2008_Gilles_New.pdf}
}
@CONFERENCE{hennenfent08sdw2,
author = {Gilles Hennenfent},
title = {Simply denoise: wavefield reconstruction via jittered undersampling},
booktitle = {SINBAD 2008},
year = {2008},
abstract = {We present a new discrete undersampling scheme designed to favor wavefield
reconstruction by sparsity-promoting inversion with transform elements
that are localized in the Fourier domain. Our work is motivated by
empirical observations in the seismic community, corroborated by
recent results from compressive sampling, which indicate favorable
(wavefield) reconstructions from random as opposed to regular undersampling.
As predicted by theory, random undersampling renders coherent aliases
into harmless incoherent random noise, effectively turning the interpolation
problem into a much simpler denoising problem. A practical requirement
of wavefield reconstruction with localized sparsifying transforms
is the control on the maximum gap size. Unfortunately, random undersampling
does not provide such a control and the main purpose of this paper
is to introduce a sampling scheme, coined jittered undersampling,
that shares the benefits of random sampling, while offering control
on the maximum gap size. Our contribution of jittered sub-Nyquist
  sampling proves to be key in the formulation of a versatile wavefield
sparsity-promoting recovery scheme that follows the principles of
compressive sampling. After studying the behavior of the jittered-undersampling
scheme in the Fourier domain, its performance is studied for curvelet
recovery by sparsity-promoting inversion (CRSI). Our findings on
synthetic and real seismic data indicate an improvement of several
decibels over recovery from regularly-undersampled data for the same
amount of data collected.},
keywords = {Presentation, SINBAD, SLIM},
url = {http://slim.eos.ubc.ca/SINBAD2008/Program_files/SINBAD2008_Gilles_jit.pdf}
}
@CONFERENCE{hennenfent07jdn,
author = {Gilles Hennenfent},
title = {Just denoise. Nonlinear recovery from randomly sampled data},
booktitle = {SINBAD 2007},
year = {2007},
abstract = {In this talk, we turn the interpolation problem of coarsely-sampled
data into a denoising problem. From this point of view, we illustrate
the benefit of random sampling at sub-Nyquist rate over regular sampling
at the same rate. We show that, using nonlinear sparsity-promoting
optimization, coarse random sampling may actually lead to significantly
better wavefield reconstruction than equivalent regularly sampled
data.},
keywords = {Presentation, SINBAD, SLIM}
}
@CONFERENCE{hennenfent06apo,
author = {Gilles Hennenfent},
title = {A primer on stable signal recovery},
booktitle = {SINBAD 2006},
year = {2006},
abstract = {During this presentation an introduction will be given on the method
of stable recovery from noisy and incomplete data. Strong recovery
conditions that guarantee the recovery for arbitrary acquisition
geometries will be reviewed and numerical recovery examples will
be presented.},
keywords = {Presentation, SINBAD, SLIM},
url = {http://slim.eos.ubc.ca/SINBAD2006/SINBAD2006/Program_files/4-Felix3.pdf}
}
@CONFERENCE{hennenfent06ros,
author = {Gilles Hennenfent},
title = {Recovery of seismic data: practical considerations},
booktitle = {SINBAD 2006},
year = {2006},
abstract = {We propose a method for seismic data interpolation based on 1) the
reformulation of the problem as a stable signal recovery problem
and 2) the fact that seismic data is sparsely represented by curvelets.
This method does not require information on the seismic velocities.
Most importantly, this formulation potentially leads to an explicit
recovery condition. We also propose a large-scale problem solver
for the l1-regularization minimization involved in the recovery and
successfully illustrate the performance of our algorithm on 2D synthetic
and real examples.},
keywords = {Presentation, SINBAD, SLIM},
url = {http://slim.eos.ubc.ca/SINBAD2006/SINBAD2006/Program_files/5-Gilles2.pdf}
}
@CONFERENCE{hennenfent06tnf,
author = {Gilles Hennenfent},
title = {The Nonuniform Fast Discrete Curvelet Transform (NFDCT)},
booktitle = {SINBAD 2006},
year = {2006},
abstract = {The authors present an extension of the fast discrete curvelet transform
(FDCT) to nonuniformly sampled data. This extension not only restores
curvelet compression rates for nonuniformly sampled data but also
removes noise and maps the data to a regular grid.},
keywords = {Presentation, SINBAD, SLIM},
url = {http://slim.eos.ubc.ca/SINBAD2006/SINBAD2006/Program_files/10-Gilles3.pdf}
}
@CONFERENCE{hennenfent04tta1,
author = {Gilles Hennenfent},
title = {Three-term amplitude-versus-offset ({AVO}) inversion revisited by
curvelet and wavelet transforms},
booktitle = {SEG 2004},
year = {2004},
keywords = {Presentation, SEG, SLIM},
presentation = {http://slim.eos.ubc.ca/Publications/Public/Presentations/seg/seg04/hennenfent04tta.pdf}
}
@CONFERENCE{hennenfent07crw,
author = {Gilles Hennenfent and Felix J. Herrmann},
title = {Curvelet reconstruction with sparsity-promoting inversion: successes
  and challenges},
booktitle = {EAGE Technical Program Expanded Abstracts},
year = {2007},
month = {June},
organization = {EAGE},
publisher = {EAGE},
abstract = {In this overview of the recent Curvelet Reconstruction with Sparsity-promoting
  Inversion (CRSI) method, we present our latest 2-D and 3-D interpolation
  results on both synthetic and real datasets. We compare these results
  to interpolated data using other existing methods. Finally, we
discuss the challenges related to sparsity-promoting solvers for
the large-scale problems the industry faces.},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/EAGE/2007/hennenfent07eage_workshop.pdf}
}
@CONFERENCE{hennenfent07isf,
author = {Gilles Hennenfent and Felix J. Herrmann},
title = {Irregular sampling: from aliasing to noise},
booktitle = {EAGE Technical Program Expanded Abstracts},
year = {2007},
month = {June},
organization = {EAGE},
publisher = {EAGE},
abstract = {Seismic data is often irregularly and/or sparsely sampled along spatial
  coordinates. We show that these acquisition geometries are not necessarily
  an obstacle to accurately reconstructing adequately-sampled
  data. We use two examples to illustrate that such data may actually be better
  than equivalent regularly subsampled data. This comment was already
made in earlier works by other authors. We explain this behavior
by two key observations. Firstly, a noise-free underdetermined problem
  can be seen as a noisy well-determined problem. Secondly, regular
  subsampling creates strong coherent acquisition noise (aliasing) that is
  difficult to remove, unlike the noise created by irregular subsampling,
  which is typically weaker and Gaussian-like.},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/EAGE/2007/hennenfent07eage.pdf}
}
@CONFERENCE{hennenfent07rii,
author = {Gilles Hennenfent and Felix J. Herrmann},
title = {Recent insights in $L_1$ solvers},
booktitle = {SINBAD 2007},
year = {2007},
abstract = {During this talk, an overview is given on our work on norm-one solvers
as part of the DNOISE project. Gilles will explain the ins and outs
of our iterative thresholding solver based on log cooling while Felix
will present the work of Michael Friedlander "A Newton root-finding
algorithms for large-scale basis pursuit denoise". Both approaches
involve the solution of the basis pursuit problem that seeks a minimum
one-norm solution of an underdetermined least-squares problem. Basis
pursuit denoise (BPDN) fits the least-squares problem only approximately,
and a single parameter determines a curve that traces the trade-off
between the least-squares fit and the one-norm of the solution. In
the work of Friedlander, it is shown show that the function that
describes this curve is convex and continuously differentiable over
all points of interest. They describe an efficient procedure for
evaluating this function and its derivatives. As a result, they can
compute arbitrary points on this curve. Their method is suitable
for large-scale problems. Only matrix-vector operations are required.
  This is joint work with Ewout van den Berg and Michael P. Friedlander.},
keywords = {Presentation, SINBAD, SLIM}
}
@CONFERENCE{hennenfent07rsn,
author = {Gilles Hennenfent and Felix J. Herrmann},
title = {Random sampling: New insights into the reconstruction of coarsely
sampled wavefields},
booktitle = {SEG Technical Program Expanded Abstracts},
year = {2007},
volume = {26},
number = {1},
pages = {2575-2579},
organization = {SEG},
publisher = {SEG},
abstract = {In this paper, we turn the interpolation problem of coarsely-sampled
data into a denoising problem. From this point of view, we illustrate
the benefit of random sampling at sub-Nyquist rate over regular sampling
at the same rate. We show that, using nonlinear sparsity-promoting
optimization, coarse random sampling may actually lead to significantly
better wavefield reconstruction than equivalent regularly sampled
data. {\copyright}2007 Society of Exploration Geophysicists},
doi = {10.1190/1.2793002},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/2007/hennenfent07seg.pdf}
}
@CONFERENCE{hennenfent06aos,
author = {Gilles Hennenfent and Felix J. Herrmann},
title = {Application of stable signal recovery to seismic data interpolation},
booktitle = {SEG Technical Program Expanded Abstracts},
year = {2006},
volume = {25},
number = {1},
pages = {2797-2801},
organization = {SEG},
publisher = {SEG},
abstract = {We propose a method for seismic data interpolation based on 1) the
reformulation of the problem as a stable signal recovery problem
and 2) the fact that seismic data is sparsely represented by curvelets.
This method does not require information on the seismic velocities.
Most importantly, this formulation potentially leads to an explicit
recovery condition. We also propose a large-scale problem solver
  for the l1-regularization minimization involved in the recovery and
successfully illustrate the performance of our algorithm on 2D synthetic
and real examples. {\copyright}2006 Society of Exploration Geophysicists},
doi = {10.1190/1.2370105},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/2006/hennenfent06seg.pdf}
}
@CONFERENCE{hennenfent05scd,
author = {Gilles Hennenfent and Felix J. Herrmann},
title = {Sparseness-constrained data continuation with frames: applications
to missing traces and aliased signals in {2/3-D}},
booktitle = {SEG Technical Program Expanded Abstracts},
year = {2005},
volume = {24},
number = {1},
pages = {2162-2165},
organization = {SEG},
publisher = {SEG},
abstract = {We present a robust iterative sparseness-constrained interpolation
algorithm using 2-/3-D curvelet frames and Fourier-like transforms
that exploits continuity along reflectors in seismic data. By choosing
generic transforms, we circumvent the necessity to make parametric
assumptions (e.g. through linear/parabolic Radon or demigration)
regarding the shape of events in seismic data. Simulation and real
data examples for data with moderately sized gaps demonstrate that
our algorithm provides interpolated traces that accurately reproduce
the wavelet shape as well as the AVO behavior. Our method also shows
good results for de-aliasing judged by the behavior of the ($f-k$)-spectrum
before and after regularization. {\copyright}2005 Society of Exploration
Geophysicists},
doi = {10.1190/1.2148142},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/SEG2005_Data_Cont.pdf}
}
@CONFERENCE{hennenfent04tta,
author = {Gilles Hennenfent and Felix J. Herrmann},
title = {Three-term amplitude-versus-offset ({AVO}) inversion revisited by
curvelet and wavelet transforms},
booktitle = {SEG Technical Program Expanded Abstracts},
year = {2004},
volume = {23},
number = {1},
pages = {211-214},
organization = {SEG},
publisher = {SEG},
abstract = {We present a new method to stabilize the three-term AVO inversion
using Curvelet and Wavelet transforms. Curvelets are basis functions
that effectively represent otherwise smooth objects having discontinuities
  along smooth curves. The applied formalism exploits them to make
the most of the continuity along reflectors in seismic images. Combined
with Wavelets, Curvelets are used to denoise the data by penalizing
high frequencies and small contributions in the AVO-cube. This approach
is based on the idea that rapid amplitude changes along the ray-parameter
axis are most likely due to noise. The AVO-inverse problem is linearized,
formulated and solved for all (x, z) at once. Using densities and
velocities of the Marmousi model to define the fluctuations in the
elastic properties, the performance of the proposed method is studied
and compared with the smoothing along the ray-parameter direction
only. We show that our method better approximates the true data after
the denoising step, especially when noise level increases. {\copyright}2004
Society of Exploration Geophysicists},
doi = {10.1190/1.1851201},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/SEG/hennenfent04seg.pdf}
}
@CONFERENCE{hennenfent05scs,
author = {Gilles Hennenfent and Felix J. Herrmann and R. Neelamani},
title = {Sparseness-constrained seismic deconvolution with curvelets},
booktitle = {CSEG Technical Program Expanded Abstracts},
year = {2005},
month = {May},
organization = {CSEG},
publisher = {CSEG},
abstract = {Continuity along reflectors in seismic images is used via Curvelet
representation to stabilize the convolution operator inversion. The
Curvelet transform is a new multiscale transform that provides sparse
representations for images that comprise smooth objects separated
by piece-wise smooth discontinuities (e.g. seismic images). Our iterative
Curvelet-regularized deconvolution algorithm combines conjugate gradient-based
inversion with noise regularization performed using non-linear Curvelet
coefficient thresholding. The thresholding operation enhances the
sparsity of Curvelet representations. We show on a synthetic example
that our algorithm provides improved resolution and continuity along
reflectors as well as reduced ringing effect compared to the iterative
Wiener-based deconvolution approach.},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/CSEG/Hennenfent_G_Sparseness_Constrained_Deconvolution_w_Curvelets.doc}
}
@CONFERENCE{hennenfent05sdr,
author = {Gilles Hennenfent and R. Neelamani and Felix J. Herrmann},
title = {Seismic deconvolution revisited with curvelet frames},
booktitle = {EAGE Technical Program Expanded Abstracts},
year = {2005},
month = {June},
organization = {EAGE},
publisher = {EAGE},
abstract = {We propose an efficient iterative curvelet-regularized deconvolution
algorithm that exploits continuity along reflectors in seismic images.
Curvelets are a new multiscale transform that provides sparse representations
for images (such as seismic images) that comprise smooth objects
separated by piece-wise smooth discontinuities. Our technique combines
conjugate gradient-based convolution operator inversion with noise
regularization that is performed using non-linear curvelet coefficient
shrinkage (thresholding). The shrinkage operation leverages the sparsity
  of curvelet representations. Simulations demonstrate that our algorithm
provides improved resolution compared to the traditional Wiener-based
deconvolution approach.},
keywords = {SLIM},
url = {http://slim.eos.ubc.ca/Publications/Public/Conferences/EAGE/hennenfent05eage_poster.pdf}
}
@CONFERENCE{Herrmann03msa,
author = {Felix J. Herrmann},
title = {Multifractional splines: application to seismic imaging},
booktitle = {SPIE},
year = {2003},
editor = {M. Unser and A. Aldroubi},
volume = {5207},
pages = {240-258},
organization = {SPIE},
publisher = {SPIE},
keywords = {SLIM},
presentation = {http://slim.eos.ubc.ca/Publications/Public/Presentations/herrmann03msa.pdf
},
url = {http://www.eos.ubc.ca/~felix/Preprint/SPIE03DEF.pdf}
}
@CONFERENCE{Herrmann2003SPIE,
author = {Felix J. Herrmann},
title = {Multifractional splines: application to seismic imaging},
booktitle = {Proceedings of SPIE Technical Conference on Wavelets: Applications
in Signal and Image Processing X},
year = {2003},
editor = {M. Unser and A. Aldroubi},
volume = {5207},
pages = {240-258},
organization = {SPIE},
publisher = {SPIE},
abstract = {Seismic imaging commits itself to locating singularities in the elastic
properties of the Earth{\textquoteright}s subsurface. Using the high-frequency
ray-Born approximation for scattering from non-intersecting smooth
interfaces, seismic data can be represented by a generalized Radon
transform mapping the singularities in the medium to seismic data.
Even though seismic data are bandwidth limited, signatures of the
singularities in the medium carry through this transform and its
inverse and this mapping property presents us with the possibility
to develop new imaging techniques that preserve and characterize
the singularities from incomplete, bandwidth-limited and noisy data.
In this paper we propose a non-adaptive Curvelet/Contourlet technique
to image and preserve the singularities and a data-adaptive Matching
Pursuit method to characterize these imaged singularities by Multi-fractional
Splines. This first technique borrows from the ideas within the Wavelet-Vaguelette/Quasi-SVD
approach. We use the almost diagonalization of the scattering operator
to approximately compensate for (i) the coloring of the noise and
hence facilitate estimation; (ii) the normal operator itself. Results