<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	>

<channel>
	<title>UgoTrade &#187; ardevcamp</title>
	<atom:link href="http://www.ugotrade.com/tag/ardevcamp/feed/" rel="self" type="application/rss+xml" />
	<link>http://www.ugotrade.com</link>
	<description>Augmented Realities at the Edge of the Network</description>
	<lastBuildDate>Wed, 25 May 2016 15:59:56 +0000</lastBuildDate>
	<language>en-US</language>
		<sy:updatePeriod>hourly</sy:updatePeriod>
		<sy:updateFrequency>1</sy:updateFrequency>
	<generator>https://wordpress.org/?v=3.9.40</generator>
	<item>
		<title>Platforms for Growth and Points of Control for Augmented Reality: Talking with Chris Arkenberg</title>
		<link>http://www.ugotrade.com/2010/10/27/platforms-for-growth-and-points-of-control-for-augmented-reality-talking-with-chris-arkenberg/</link>
		<comments>http://www.ugotrade.com/2010/10/27/platforms-for-growth-and-points-of-control-for-augmented-reality-talking-with-chris-arkenberg/#comments</comments>
		<pubDate>Wed, 27 Oct 2010 09:14:49 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Android]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[privacy and online identity]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[AR and html 5]]></category>
		<category><![CDATA[AR eyewear]]></category>
		<category><![CDATA[AR eyewear for smart phones]]></category>
		<category><![CDATA[ardevcamp]]></category>
		<category><![CDATA[arduino]]></category>
		<category><![CDATA[ARWave]]></category>
		<category><![CDATA[augmented foraging]]></category>
		<category><![CDATA[augmented reality event]]></category>
		<category><![CDATA[augmented reality eyewear]]></category>
		<category><![CDATA[augmented reality on tablets]]></category>
		<category><![CDATA[augmented reality search]]></category>
		<category><![CDATA[cloud computing and AR]]></category>
		<category><![CDATA[EarthMine]]></category>
		<category><![CDATA[gartner hype cycle]]></category>
		<category><![CDATA[Gary Hayes]]></category>
		<category><![CDATA[John Battelle]]></category>
		<category><![CDATA[Kevin Slavin]]></category>
		<category><![CDATA[Layar]]></category>
		<category><![CDATA[location based services]]></category>
		<category><![CDATA[Metaio]]></category>
		<category><![CDATA[Mobile AR]]></category>
		<category><![CDATA[mobile social augmented reality]]></category>
		<category><![CDATA[MUVEdesign]]></category>
		<category><![CDATA[NVidia augmented reality demo]]></category>
		<category><![CDATA[Ogmento]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[Platforms for Growth]]></category>
		<category><![CDATA[Points of Control Map]]></category>
		<category><![CDATA[Porthole]]></category>
		<category><![CDATA[QR codes]]></category>
		<category><![CDATA[Qualcomm SDK for AR]]></category>
		<category><![CDATA[real time analytics and AR]]></category>
		<category><![CDATA[RFID]]></category>
		<category><![CDATA[Simple Geo]]></category>
		<category><![CDATA[The Battle for the Internet Economy]]></category>
		<category><![CDATA[Tim O'Reilly]]></category>
		<category><![CDATA[Total Immersion]]></category>
		<category><![CDATA[transmedia story telling]]></category>
		<category><![CDATA[trasmedia]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[Ushahidi]]></category>
		<category><![CDATA[Usman Haque]]></category>
		<category><![CDATA[vision based AR]]></category>
		<category><![CDATA[W3C group on augmented reality]]></category>
		<category><![CDATA[Wave in a Box]]></category>
		<category><![CDATA[Web 2.0 Expo]]></category>
		<category><![CDATA[web standards based browser for AR]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=5924</guid>
		<description><![CDATA[The Points of Control map is interactive, so please click here or on the image above for the full experience. Today at 4pm EST, 1pm PDT John Battelle and Tim O&#8217;Reilly will discuss the Points of Control map and The Battle for the Internet Economy in a Free Webcast: &#8220;More than any time in the [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://map.web2summit.com/"><img class="alignnone size-medium wp-image-5931" title="Screen shot 2010-10-27 at 1.56.15 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-1.56.15-AM-300x181.png" alt="Screen shot 2010-10-27 at 1.56.15 AM" width="300" height="181" /></a></p>
<p><em>The Points of Control map is interactive, so please <a href="http://map.web2summit.com/" target="_blank">click here </a>or on the image above for the full experience.</em></p>
<p><em> </em>Today at 4pm EST, 1pm PDT John Battelle and Tim O&#8217;Reilly will discuss the <a href="http://map.web2summit.com/" target="_blank">Points of Control</a> map and The Battle for the Internet Economy <a href="http://oreilly.com/emails/poc_web2summit-webcast-prg.html" target="_blank">in a Free Webcast</a>:</p>
<p><strong>&#8220;More than any time in the history of the Web, incumbents in the network  economy are consolidating their power and staking new claims to key  points of control. It&#8217;s clear that the internet industry has moved into a  battle to dominate the Internet Economy.</strong></p>
<p><strong>John Battelle and Tim O&#8217;Reilly will debate and discuss these shifting  points of control as the board becomes increasingly crowded. They&#8217;ll map  critical inflection points and identify key players who are clashing to  control services and infrastructure as they attempt to expand their  territories. They&#8217;ll also explore the effect these chokepoints could  have on people, government, and the future of technology innovation.&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.01.38-AM.png"><img class="alignnone size-medium wp-image-5932" title="Screen shot 2010-10-27 at 2.01.38 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.01.38-AM-300x124.png" alt="Screen shot 2010-10-27 at 2.01.38 AM" width="300" height="124" /></a></p>
<p><em> </em>I&#8217;ve been wanting to start a discussion on the <a href="http://map.web2summit.com/">Points of Control map </a>in the Augmented Reality community for a while now, and Chris&#8217; recent post on <a href="http://www.gartner.com/it/page.jsp?id=1447613" target="_blank">the latest edition of the Gartner Hype Cycle</a>, <a href="http://www.urbeingrecorded.com/news/2010/10/13/is-ar-ready-for-the-trough-of-disillusionment/" target="_blank">&#8220;Is AR Ready for the Trough of Disillusionment?&#8221; </a>and this post by Mac  Slocum, <a href="http://radar.oreilly.com/2010/10/two-ways-augmented-reality-app.html" target="_blank">&#8220;How Augmented Reality Apps Can Catch On,&#8221;</a> and the conversation in the comments between Mac, Raimo (one of the founders of <a href="http://www.layar.com/" target="_blank">Layar)</a>, and Chris, all prompted me to get a conversation started&#8230;(see below for all that followed!). Chris put me on the hot seat back in June when he did <a href="http://www.boingboing.net/2010/06/17/tish-shute---augment.html" target="_blank">this very generous interview with me on Boing Boing</a>, so it was time to turn the tables.</p>
<p>Tim O&#8217;Reilly, in his <a href="http://www.youtube.com/watch?v=3637xFBvkYg&amp;p=6F97A6F4BA797FB3" target="_blank"> keynote for Web 2.0 Expo,</a> pointed out there is both a fun and a dark side to the Points of Control map. There are companies on this map, he noted, that rather than &#8220;growing the pie,&#8221; are  trying to divide up the pie, and they are forgetting to think about  creating a sustainable ecosystem. I expect the conversation between Tim O&#8217;Reilly and John Battelle to dig deep into this Battle for the Internet Economy. If, like me, you have another engagement at the time of the webcast, you can register on the site to receive the recording.</p>
<p>AR is still too young to figure in the battles of the giants, but there will be a lot to be learned from this conversation. And, The Points of Control map is good to think with from the POV of AR in many ways. As Chris Arkenberg observed:</p>
<p><strong>&#8220;When I look at this map, the points of control map, it&#8217;s  really interesting to me, because what it says to me with respect to AR  is each of these little regions that they have drawn out would be a  great research project. So every single one of these should be  instructive to AR.</strong></p>
<p><strong>In other words, we should be able to look at social networks,  the land of search, or kingdom of ecommerce, and apply some very  rigorous critical thinking to say, &#8220;How would AR add to this engagement,  this experience of gaming, or ecommerce, or content?&#8221;</strong></p>
<p><strong>Looking at each of these individually and really meticulously  saying, &#8220;OK, well yes, it can do this but how is that different from  the current screen media experience, the current web experience that we  have of all these types of things?&#8221;  You know, how can augmented  reality really add a new layer of value and experience to these? And I  think that process would really trim a lot of the fat from the hopes and  dreams of AR and anchor it down into some very pragmatic avenues for  development.  And then you could start looking at, &#8220;Well, OK, what  happens when we start combining these?&#8221; When we take gaming levels and  plug that into the location basin, as you suggested.&#8221;</strong></p>
<p>Chris Arkenberg is a technology professional with a focus on product strategy &amp; development, specializing in 3D, augmented reality, ubicomp and the social web. He uses research, scenario planning, and foresight methodologies to help organizations anticipate change and adopt a resilient and forward-looking posture in the face of unprecedented uncertainty. His personal work is collected at <a href="http://urbeingrecorded.com " target="_blank">urbeingrecorded</a>, and his <a href="http://www.linkedin.com/in/chrisarkenberg" target="_blank">professional profile is here.</a></p>
<p>He is also one of the founder/organizers of <a href="http://ardevcamp.org" target="_blank">AR DevCamp</a> which is currently scheduled for Dec. 4th (somewhere in SF or The Valley!) Chris said, &#8220;No further details atm (still trying to find a venue and get sponsors) but please direct people to http://ardevcamp.org for upcoming information.&#8221;</p>
<h3>Talking with Chris Arkenberg</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/ChrisArkenberg.jpg"><img class="alignnone size-medium wp-image-5929" title="ChrisArkenberg" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/ChrisArkenberg-300x199.jpg" alt="ChrisArkenberg" width="300" height="199" /></a></p>
<p><strong>Tish Shute:</strong> I know some people thought <a href="http://www.gartner.com/it/page.jsp?id=1447613" target="_blank">the positioning of AR by Gartner near the peak of the hype cycle </a>was misguided, and based on a very narrow understanding of AR as used in marketing apps. But reading your post I thought you made a lot of good points.</p>
<p><strong>Chris Arkenberg:  It&#8217;s tracking hype, right?  It&#8217;s not necessarily tracking the growth of the technologies or their maturation so much as it&#8217;s tracking the general attention level.  And what&#8217;s interesting to me is that tends to affect the amount of money that goes into those technologies.</strong></p>
<p><strong>Tish Shute:</strong> I was particularly interested in your post because I have been writing a post about two recent O&#8217;Reilly events in NYC, <a href="http://makerfaire.com/newyork/2010/" target="_blank">Maker Faire</a>, <a href="http://www.web2expo.com/">Web 2.0 Expo</a>, and then <a href="http://www.cloudera.com/company/press-center/hadoop-world-nyc/" target="_blank">Hadoop World</a>, where Tim gave a very interesting 45 minute keynote.  AR was pretty low profile at all three events. <a href="http://www.flickr.com/photos/bdave2007/5036397168/in/photostream/" target="_blank"> But the NVidia augmented reality demo attracted a lot of attention at the sponsors expo, </a> and Usman Haque, Founder of <a href="http://www.pachube.com/" target="_blank">Pachube</a> announced in<a href="http://www.web2expo.com/webexny2010/public/schedule/speaker/43845" target="_blank"> his presentation</a>,  they are working on an augmented reality interface for Pachube called Porthole, it&#8217;s designed for  facilities management and, &#8220;as a consumer-oriented application that  extends the universe of Pachube data into the context of AR &#8211; a  &#8216;porthole&#8217; into Pachube&#8217;s data environments.&#8221; Usman also mentioned, when I talked to him, that he is contributing to the AR standards discussion and on the program committee now <a href="http://www.w3.org/2010/06/16-w3car-minutes.html#item02" target="_blank">for the W3C group on augmented reality</a>. For more on this standards discussion and the Pachube AR interface, see Chris Burman&#8217;s paper for the W3C, <a href="http://www.w3.org/2010/06/w3car/portholes_and_plumbing.pdf" target="_blank">Portholes and Plumbing: how AR erases boundaries between &#8220;physical&#8221; and &#8220;virtual.&#8221;</a></p>
<p>I think pioneers in the augmented reality community should pay attention to these wider conversations about the Battle for the Internet Economy, and the exploration of the &#8220;Platforms for Growth&#8221; theme at <a href="http://www.web2expo.com/">Web 2.0 Expo</a> is very important &#8211; this is of course also a nudge to read my upcoming post on these O&#8217;Reilly events!</p>
<p>Also I have another project I have been chewing on that I would like to talk to you about.  I want to start an AR conversation about the wonderful <a href="http://map.web2summit.com/">Points of Control map</a> produced for Web 2.0 summit by <a href="http://battellemedia.com/" target="_blank">John Battelle</a>. [ Note there will be, "Battle for the Internet Economy" free Web2Summit webcast w/ @johnbattelle &amp; @timoreilly Wed 10/27 at 1pm PT http://bit.ly/b46cmb #w2s]</p>
<p>Up to this point, understandably given the immaturity of the technology, AR has little role in the &#8220;Battle for the Internet  Economy.&#8221;  But this doesn&#8217;t mean that the map isn&#8217;t good for AR visionaries, enthusiasts, entrepreneurs, and developers to think with.  And both you and Tim have pointed out the potential for AR to leverage the giant data subsystems in the sky.  I have to say the positioning of Cloud Computing on the brink of heading down into the trough of disillusionment in this recent rendition of the Gartner Hype Cycle seems ridiculous!</p>
<p>Cloud Computing is already ubiquitous &#8211; it hardly seems credible that it is headed for a trough of disillusionment!</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.48.30-AM.png"><img class="alignnone size-medium wp-image-5940" title="Screen shot 2010-10-27 at 2.48.30 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.48.30-AM-300x199.png" alt="Screen shot 2010-10-27 at 2.48.30 AM" width="300" height="199" /></a></p>
<p><strong>Chris Arkenberg:  Yeah, it&#8217;s ubiquitous so why even talk about it when it&#8217;s your fundamental infrastructure?</strong></p>
<p><strong>Tish Shute:</strong> Yeah and I seriously doubt it is  imminently headed for a  trough of disillusionment&#8230;and this brings me back to the Points of Control Map which as John Battelle points out,  &#8220;aims to  identify key players who are battling to control the services and infrastructure of a websquared world&#8221; in which the &#8220;Web and the world intertwine through mobile and sensor platforms.&#8221;  This instrumented world, of course, creates a great deal of opportunity for augmented reality.  Have you seen that, that points of control map?</p>
<p><strong>Chris Arkenberg:  I think I have, actually.</strong></p>
<p><strong>Tish Shute: </strong> There has been much debate about how this intertwining of the web and  the world will play out in augmented reality.  Chris Burman points out in his position paper for W3C, <a href="http://www.w3.org/2010/06/w3car/portholes_and_plumbing.pdf" target="_blank">Portholes and Plumbing: how AR erases boundaries between &#8220;physical&#8221; and &#8220;virtual&#8221;</a>, that &#8220;trying to draw parallels between a browser based web and the possibilities of AR may solve issues of information distribution in the short-term,&#8221; but it must not have a limiting effect in the long-term.  But now we at least have one <a href="https://research.cc.gatech.edu/polaris/" target="_blank">web standards-based browser for AR</a> thanks to the work of Blair MacIntyre and the Georgia Tech team.  But  I think the discussion in the comments of Mac Slocum&#8217;s recent post, <a href="http://radar.oreilly.com/2010/10/two-ways-augmented-reality-app.html" target="_blank">&#8220;How Augmented Reality Apps Can Catch On&#8221;</a> is an interesting starting point from which to think about platforms of growth for AR.  I am not sure if I am stretching his meaning but I think Raimo, <a href="http://www.layar.com/" target="_blank">Layar</a>, is suggesting that what the Points of Control map calls the Plains of Media content is very important to the growth of the fledgling AR industry right now.   And I would agree with this, and add that the neighboring terrain of gaming levels will be pretty key as one of my other favorite AR start ups <a href="http://ogmento.com/" target="_blank">Ogmento</a> hopes to reveal in the near future!  But what do you think was most important in this brief but pithy dialogue between you Raimo and Mac?</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.56.02-AM.png"><img class="alignnone size-medium wp-image-5941" title="Screen shot 2010-10-27 at 2.56.02 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.56.02-AM-300x179.png" alt="Screen shot 2010-10-27 at 2.56.02 AM" width="300" height="179" /></a></p>
<p>[The screenshot above is from a teaser video by <a title="Gary Hayes" href="http://www.personalizemedia.com/future-of-location-based-augmented-reality-story-games/?utm_source=feedburner&amp;utm_medium=twitter&amp;utm_campaign=Feed:+PersonalizeMedia+%28PERSONALIZE+MEDIA%29" target="_blank">Gary Hayes</a> from <a title="MuveDesign" href="http://www.muvedesign.com/">MUVEdesign</a> for his upcoming (2011 release date), game called Time Treasure. See Gary&#8217;s <a title="Gary Hayes" href="http://www.personalizemedia.com/future-of-location-based-augmented-reality-story-games/?utm_source=feedburner&amp;utm_medium=twitter&amp;utm_campaign=Feed:+PersonalizeMedia+%28PERSONALIZE+MEDIA%29" target="_blank">blog</a> for more and Gary&#8217;s <a href="http://www.personalizemedia.com/16-top-augmented-reality-business-models/" target="_blank"> post from over a year ago</a> on AR Business models. Thomas K. Carpenter, <a href="http://gamesalfresco.com/2010/10/25/time-treasure-future-tablet-game/" target="_blank">on Games Alfresco notes</a>, &#8220;I think this is a terrific idea and I find it interesting he&#8217;s planning this on a tablet rather than a smartphone.&#8221;</p>
<p><strong>Chris Arkenberg:  The way I took it&#8230;And to give a little bit of context, I came from sort of this apprehension of augmented reality as an expression of the existing Internet.  So as sort of a visualization layer that allows you to kind of draw out data, and then, with all the affordances of being able to anchor it to real world things.</strong></p>
<p><strong>And my own sort of path has led me to want to really try to understand that and refine it, particularly with respect to the sort of Internet of things and the smarter planet idea of just having embedded systems everywhere.  And specifically, what is the value-add  for augmented reality as a visualization layer of an instrumented world?</strong></p>
<p><strong>And so that&#8217;s caused me to be a bit biased towards that side of AR.  And the way I took Raimo&#8217;s comment was that he was saying that, &#8220;You know, really what we&#8217;re interested in is media.&#8221;  That he was effectively saying that AR for them is really just about that space between the screen and the world, or between your eyes and the world, and what you can do there.</strong></p>
<p><strong>Certainly I had considered it in the past, but I hadn&#8217;t really focused on it or assumed that it was a priority as a business model.  And so he kind of reminded me that, actually, there&#8217;s a lot of entertainment applications.  There&#8217;s a lot of, obviously, advertising and marketing applications.<br />
And so I felt that I was being a little narrow in my focus&#8230;</strong></p>
<p><strong>Tish Shute: </strong> Yes this comes to the heart of what I am interested in about the role AR can play in opening up new relationships to the world of data that we live in, not just making it more accessible and useful to us when and where we need it, but AR as a road to reimagining it&#8230;</p>
<p>Have you seen any interesting work yet to explore these great data economies in the cloud through AR?  I mean can you think of any others &#8211; there is <em><em><a href="http://www.planefinder.net/" target="_blank">planefinder.net</a> </em></em> but others?</p>
<p><strong>Chris Arkenberg:  I&#8217;ve seen a few just sort of skunk works type applications that people have been playing around with, again, to try and reveal things.  One of them was similar to the aircraft, but it was more for military use and being able to identify things of interest in the sky.  I&#8217;ve seen a couple other for navigation, so being able to identify mountain peaks on a visual plane, for example, but this isn&#8217;t so much about revealing an instrumented world.</strong></p>
<p><strong>Tish Shute:</strong> Yeah, I think that was from the Imagination right?  I know that&#8217;s an interesting one. Usman at Web 2.0 Expo, <a href="http://www.web2expo.com/webexny2010/public/schedule/speaker/43845" target="_blank">in his presentation,</a> mentioned the work Pachube is doing on an Augmented Reality interface.  I interviewed Usman again as my last long interview with him was nearly 18 months ago now and Pachube is well on the way to becoming the Facebook of Data or the analogy that Usman prefers &#8211; the Twitter of sensors!</p>
<p><strong>Chris Arkenberg:  Hmm, interesting.</strong></p>
<p><strong>Tish Shute:</strong> And to go back to your comments on Augmented Reality not getting caught in some of the traps that have made virtual worlds lose relevancy, I think it is vital that AR developers understand the strategic possibilities of key points of control in the internet economy because the isolation and Balkanization of virtual worlds were certainly a factor in their rapid slide into the trough of disillusionment &#8211; although many would argue that a fundamental flaw in the kind of virtual experience that Second Life and other virtual worlds constructed was really the fatal flaw (see James Turner&#8217;s interview with Kevin Slavin <a href="http://radar.oreilly.com/2010/09/drawing-the-line-between-games.html" target="_self">Reality has a gaming layer</a>).</p>
<p>But Second Life&#8217;s isolation from the other great network economies of the internet was certainly a limiting factor.</p>
<p><strong>Chris Arkenberg:  And that&#8217;s been exactly my sense, and I&#8217;ve, over the years, tried to encourage development in that direction for virtual worlds.  I did work, through Adobe, to help develop Atmosphere 3D back in the early 2000&#8217;s.  And we did a lot of work to try and understand the marketplace and the specific value-add of doing things in 3D over 2D.</strong></p>
<p><strong>And this is kind of why I keep referring back to VR and VW&#8217;s with respect to augmented reality, is that with immersive worlds, there was this idea&#8230;there was this big rush.  Everybody was so excited about it.  It was obviously the next cool thing.  And everybody wanted to try to do everything in it.  You could do your shopping in virtual worlds. You could have meetings in virtual worlds.</strong></p>
<p><strong>Tish Shute:</strong> and  shopping, yes ..that didn&#8217;t work out so well!</p>
<p><strong>Chris Arkenberg:  And everybody was very excited in developing these things.  And what it really came down to is, &#8220;Yeah, you can, but it&#8217;s actually a lot better to do those things on a flat plane or in person.&#8221;  Meeting Place, WebEx, TelePresence &#8211; those tools generally do a much better job at facilitating TelePresence meetings than a virtual world does. The same with TelePresent Education. There are only very specific things that both VR and AR are really good at.</strong></p>
<p><strong>And that&#8217;s where I find myself with augmented reality right now, trying to really pick through that and critically look at which uses are really appropriate for an AR overlay. And again, I think that&#8217;s why the hype cycle is important, because it reflects back this desire that AR is going to be the next big thing &#8211; the be-all, end-all of interacting with data in the cloud &#8211; and forces us all to take a critical look at why we should do things in AR instead of on a screen.</strong></p>
<p><strong>AR is not going to work well for most things but it&#8217;s going to be very good for certain uses.  Right now I&#8217;m very keen at trying to understand what those things might be.</strong></p>
<p><strong>Tish Shute:</strong> I had this wonderful conversation (more in an upcoming post) with Kevin Slavin one of the founders of <a href="http://areacodeinc.com/" target="_blank">Area/Code</a> at Web 2.0 Expo and I think some of what he describes about the data brokerages of High Frequency trading have some interesting implications for AR&#8217;s role, say, in ubiquitous computing.  The trading markets are now pretty much dominated by machine to machine intelligence; machine to machine brokerages.  They are basically game economies on the scale that we can barely wrap our heads around where the speed that bots and algo traders can access the network is the key.  We really have no clue what is going on  until we lose our house&#8230;</p>
<p>Kevin was also<a href="http://radar.oreilly.com/2010/09/drawing-the-line-between-games.html" target="_blank"> interviewed by James Turner on O&#8217;Reilly Radar.</a> He talked about how much of the interesting work in location based mobile social apps is defined in opposition to the model of Second Life.  He also talked to me about  how we are seeing &#8220;first life&#8221; take on the qualities of &#8220;second life.&#8221;  What goes on the trading floor is largely a performance secondary to a more important world of machine intelligence with giant co-located servers  and bots fighting for trading advantages measured in fractions of seconds.</p>
<p>He pointed out how we draw on all these tropes from sci-fi movies, these HUDs based on ideas of machine intelligence where the robot talks to the other robot in English through an English HUD! Many of our current visual tropes for AR are perhaps just as inadequate for the kind of data driven world we live in.</p>
<p>Of course, when you are thinking of having fun with  dinosaurs, or illustrated books, or whatever, this is not, perhaps, an issue. But if you are thinking of augmented reality interfaces as being important in a battle for network economy, and platforms for growth, how this new interface helps us live better in a world of data is an important issue.</p>
<p><strong>Chris Arkenberg:  Now, does that indicate that the UI just needs more overhaul and innovation, or more that the visual interface for those experiences shouldn&#8217;t really leave the screen?  It shouldn&#8217;t move on to the view plane?</strong></p>
<p><strong>Tish Shute: </strong> Yes we have a few concept videos that try and explore this ..</p>
<p><strong>Chris Arkenberg:  Well, and I think this will happen at the level of human-computer interface.  I mean that&#8217;s always been its role, in making coherent the sort of machine mind, for lack of a better term, making it coherent to the human mind. So I mean there is a lot of this sort of machine intelligence, the semantic Web 3.0 revolution, where it really is about enabling machines, and agents, and bots to understand the content that we&#8217;re feeding them.</strong></p>
<p><strong>But at the end of the day, they, for now, need to be providing value to us human operators. So there&#8217;s always going to be a role for  human-computer interface and user experience design to make this stuff meaningful.</strong></p>
<p><strong>I mean, if you look at the revolution in visualization &amp; data viz, this is of incredible value because it takes a tremendous amount of data and collates it into a glanceable graphic that you can look at and immediately comprehend massive amounts of data because it&#8217;s delivered in a handy, visual way.</strong></p>
<p><strong>So I see that as a fascinating design challenge, how the user experience of the data world can be translated into meaningful human interaction.</strong></p>
<p><strong>Tish Shute:</strong> Yeah.  And when we see <a href="http://stamen.com/" target="_blank">Stamen Design</a> pursuing a big idea in AR, that&#8217;s when we might start to rock and roll, right?</p>
<p><strong>Chris Arkenberg:  Yeah. In my article, I sort of jokingly suggested that Apple will create the iShades.  But, they&#8217;ve got the track record of being way ahead of the curve and delivering the future in very bold forms.</strong></p>
<p><strong>Tish Shute:</strong> A key part for the battle for the network economy is to bring the complexity of data into the human realm in a way that increases human agency.  Kevin suggests that the giant robot casinos of markets should actually lift off into total abstractions as these machine-driven trades get back into the human realm in ways that are so damaging to our lives &#8211;  a lost house or job!  The notion of a counterveillance society where people have more agency over the important aspects of their lives, health, housing, job (which I discussed with Kevin &#8211; interview upcoming) has gotten pretty tricky!</p>
<p>But I think we will begin to see AR eyewear for specific applications (gaming and industrial) get more common fairly soon &#8211; possibly as smart phone accessories.</p>
<p>And it is clear that AR is going to be, increasingly, a part of our entertainment smorgasbord in coming months. The iPod touch has a camera (although lower resolution), Nintendo&#8217;s are AR-ready and many aspects of the AR vision of hands-free spatial interfaces will go mainstream through Natal.</p>
<p>But we are yet to see an app/platform emerge for mobile social AR games that turn every bar and cafe and ultimately the whole city into a gaming venue &#8211; although I think Ogmento and MUVE aim to lead the way here!  Will an AR company achieve Zynga-level success by using Foursquare, for example?</p>
<p>My feeling is that the lesson of Zynga is pretty important for mobile social AR games.  Could Flash social gaming have taken off without Facebook?</p>
<p><strong>Chris Arkenberg:  And thatâ€™s the real driver.  And again, as you mentioned with Second Life, and this was exactly my own sense, is that they stuck to the closed garden model and didnâ€™t get the power of social and collaboration.  They attempted to add some of those affordances within the world, but, you know, ultimately most people arenâ€™t in virtual worlds, and most people arenâ€™t using augmented reality.  So leveraging the really predominate platforms like Twitter and Facebook and Foursquare, being able to leverage those affordances, that connectivity, into a platform like augmented reality, I think, is really critical. Because again, you get nothing unless you have the masses, unless you have people present.</strong></p>
<p><strong>Tish Shute:</strong> In AR research there is a long history of the  notion of powerful AR-dedicated devices, but smart phones and tablets are good enough,Â  and can launch augmented reality into the heart of the internet economy.  I thinkÂ  the elusive AR eyewear will come to us initially as a smart phone accessory for specific apps.Â  But, for the moment, most AR apps make little attempt to play in the wider internet economy.</p>
<p><strong>Chris Arkenberg:  And I think itâ€™s actually much lower hanging fruit, really, to do gaming, marketing, transmedia.  Because then you donâ€™t really care about the cloud, or maybe you only really care about a little part of it that your gaming property is addressing. Then it becomes much more about entertainment, and much more about persuasion, and sensationalism.  And if youâ€™ve got dancing dinosaurs on your street, great!  Itâ€™s entertaining, itâ€™s cool, itâ€™s new. That stuff is fairly straightforward.</strong></p>
<p><strong>I keep coming back to this idea of, you know, the instrumented city.  What sort of data trails do you get out of a fully instrumented city?  So maybe you get traffic patterns, maybe you get geo-local movements of masses, maybe you get energy usage, that sort of thing, all the, sort of  heat maps you can generate from a city. But then what good does it do to be able to have that on an augmented reality layer versus just looking at it on a mobile device or looking at it on your laptop?</strong></p>
<p><strong>Tish Shute:</strong> Of course the use cases for &#8220;magic lens&#8221; AR are different from the kind of hands-free, 360 view with tightly registered media, that a full vision of AR has always promised.  The 360 view is quite a different metaphor from the web and mobile rectangular screens.</p>
<p><strong>Chris Arkenberg:  Yes, yes.</strong></p>
<p><strong>Tish Shute:</strong> Did you see that <a href="http://laughingsquid.com/tweet-it-ipads-vs-iphones-a-parody-of-michael-jacksons-beat-it/" target="_blank">great parody of Michael Jackson&#8217;s</a> &#8220;Beat It&#8221; with the iPads versus the iPhones, right?</p>
<p><strong>Chris Arkenberg:  Oh, really?</strong></p>
<p><strong>Tish Shute:</strong> I tweeted it &#8217;cos I thought it was quite funny and a little close to the bone!<br />
[laughter]</p>
<p>&#8220;ur wanna an ipatch 2 b the new fad?&#8221; #AR gets cameo in Twitter, iPads &amp; iPhone&#8217;s Michael Jackson-Inspired Parody via @mashable</p>
<p>It is hard to get away from the importance of eyewear when discussing AR!</p>
<p><strong>Chris Arkenberg: Yes, so the hardware, to me, is a big stumbling point right now, or itâ€™s a large gating factor, I think, for realizing what an augmented reality vision could really be like.  That it really does need to be heads up.  This holding the phone up in front of you is fun to demonstrate that itâ€™s possible, and itâ€™s valuable in some waysâ€¦</strong></p>
<p><strong>Tish Shute:</strong> And itâ€™s particularly nice in some applications like the planes app, the Acrossair subway app where you hold the phone down and get the arrow, right?</p>
<p><strong>Chris Arkenberg:  Yeah, the way-finding stuff I think is really valuable&#8230;</strong></p>
<p><strong>Tish Shute:</strong> Sixth Sense really caught peopleâ€™s imagination because it managed to deliver the gesture interface with cheap hardware, even if projection has limited uses (no brightly lit spaces or privacy for example!).</p>
<p>The other important and as yet unrealized part of the AR dream is real-time communications.  Many interesting use cases would require this. As you know that is my chief excitement, along with federation, in the Google Wave servers (which should soon be released as <a href="http://googlewavedev.blogspot.com/2010/09/wave-open-source-next-steps-wave-in-box.html" target="_blank">Wave in a Box</a>) for <a href="http://www.arwave.org/" target="_blank">ARWave</a>.</p>
<p><strong>Chris Arkenberg:  Well my sense of Wave is that it was a ChromeOS protocol that they instantiated, or that they exhibited in the public deployment of Google Wave.  That that was a proof of their sort of low level architectural solution.  Because, you know, theyâ€™ve been rumored to be working on this cloud OS for some time. And so my sense is that Wave is actually one of their core components of that cloud OS, and that it just happened to incarnate for the public in a test run as Google Wave.</strong></p>
<p><strong>Tish Shute:</strong> I do hope that Wave in a Box will lower the barriers to entry to people experimenting with this technology.  The FedOne server was just way too hard for most people to take the time to set up.  Of course, it is the brilliance of the Wave Operational Transform work that also poses problems in terms of ease of use. But Wave Federation Protocol is pretty innovative. And could even play an important role in real-time communications for AR eyewear connected to smartphones. The challenges that Wave takes on re real-time communications, federation, permissions and filters are pretty important ones for AR&#8230;</p>
<p><strong>Chris Arkenberg:  Especially when youâ€™re trying to federate a lot of permissions and filter a lot of data, which all of that gets even more important when you have a visual layer between you and the real world.</strong></p>
<p><strong>Tish Shute:</strong> You got it.  Yeah!</p>
<p><strong>Chris Arkenberg:  I think thatâ€™s really valuable real estate, both for third parties that want to get access to your eyes, as well as for you, as the user, who still needs to navigate through the phenomenal world and not be occluded by massive amounts of overhead data.</strong></p>
<p><strong>Tish Shute:</strong> Yes, I am sure Google has big plans for the next level of cloud computing and Wave looks at some key challenges.  I suppose federation poses some key business problems.  I think it was Michael Jones who said to me that it was a bit like socialism in that you have to be willing to give something up for the greater good.</p>
<p>Perhaps federation does not present enough appeal because of its challenges re business models?</p>
<p><strong>Chris Arkenberg:  Well, I wonder.  I mean thereâ€™s got to be some value for their ad platform as ads are moving more towards this personalized experience.  Advertising is becoming less of a shotgun blast and more of a very precise, surgical strike. So being able to track user data to such a fine degree to mobilize the appropriate ads around them wherever they are, on any platform, is certainly very valuable to Google and their ad ecology.</strong></p>
<p><strong>Tish Shute:</strong> Many people have high hopes that HTML 5 by lowering the barrier of entry forÂ  browser style AR could also pave the way for some interesting AR work..</p>
<p><strong>Chris Arkenberg:  Well, as much as I would hope that all the different players are going to come together and establish some shared set of standards, really, whatâ€™s happening is itâ€™s a rush to the finish line to be the firstâ€¦to get the most penetration in the marketplace so that Layar, for example, can say, â€œItâ€™s official.  Weâ€™re the platform.â€  And then the consolidation that will follow, where the Googles and the other big players like Qualcomm say, â€œOK, itâ€™s mature enough.  Weâ€™ll start buying up all the smaller companies.â€</strong></p>
<p><strong>And thatâ€™s where the real challenge is right now is that there are no standards.  Itâ€™s such an immature technology that you have a lot of different players trying to establish the ground rules.  And again, this is one of the challenges that faced public virtual worlds, is that you had a lot of different virtual worlds that werenâ€™t talking to each other in any particular way, and that they each had their own development platform. And so you end up with a very fractured ecosystem or set of competing ecosystems, which is kind of whatâ€™s happening with AR right now, where a developer has to choose between a number of different new platforms or hedge by deploying across multiple platforms. Basically, the web browser wars are set to be recapitulated by the AR browsers.</strong></p>
<p><strong>Among them, Layar and Metaio seem to be getting the most traction.  But thereâ€™s still not a really strong case for a unified development ecosystem to emerge.</strong></p>
<p><strong>Tish Shute:</strong> So a discussion of ecosystem development brings us back to the Points of Control Map I think. So what do you see as key points of interest for AR developers to watch in the  Points of Control Map? And where do you want to sort of put your bets, right?  We are still really waiting for mobile social AR to emerge into the mainstream.</p>
<p><strong>Chris Arkenberg:  Yes.  And thatâ€™s primarily the shortcoming of  the hardware itself, but also of the accuracy of current GPS technology.  Thatâ€™s another kind of gating factor, because again, AR wants to be able to express the data within a distinct place or object.</strong></p>
<p><strong>So in a lot of ways, other than kind of what we&#8217;ve allowed for the broader entertainment purposes, for AR to really work, there needs to be more resolution in GPS location.  So for it to be truly locative&#8230;because it&#8217;s OK to tell Foursquare that you&#8217;re in Bar X.  But if you want to be able to draw data directly on a wall within that bar, or do advertising over the marquee on the front, you need more factors to accurately register those images on a discrete location. So that&#8217;s another, sort of, aspect of the immaturity of AR, is that it&#8217;s still very hard to register things on discrete locations without employing a number of diverse triangulation methods.</strong></p>
<p><strong>Tish Shute:</strong> Right.  The mobile AR games we see at the moment are really just faking a relationship to the physical world unless they rely on markers or some limited form of natural feature recognition which is really just a more sophisticated form of markers.  But the Qualcomm  SDK does offer some opportunities to tie AR media to the world more tightly as does the Metaio SDK. But in terms of a mobile social AR game that could be like the Cape of Zynga to FourSquare in Location Basin [see the <a href="http://map.web2summit.com/">Points of Control map</a>]&#8230; We havenâ€™t seen anything close yet.</p>
<p>AR should be able to bring the check-in mode to any object in our environment.</p>
<p><strong>Chris Arkenberg:  Yes, yes.  And thatâ€™s actually one of the early interests I had in the notion of social augmented reality. I wanted a way to tag my community with invisible annotations that only certain people could read, and found pretty quickly that thatâ€™s very difficult to do.  I mean you can kind of do some regional tagging, like on a  beach, for example, but if you wanted to tag the bench that was on the cliff above the beach, itâ€™s very difficult to do that using strictly locative reckoning.</strong></p>
<p><strong>Thereâ€™s all sorts of really cool social engagement that can be revealed when people are allowed to attach things to the world around them, to the streets they normally pass through, or the points of interest that they normally engage in. To be able to author on the fly on the streets and attach it discreetly to an object effectively.</strong></p>
<p><strong>Tish Shute:</strong> And yes we do have all kinds of markers and QR codes.  But Erick Schonfeld of Tech Crunch<a href="http://techcrunch.com/2010/10/18/likify-qr-code/" target="_blank"> made a good point that QR codes</a>: &#8220;Until QR code scanners become a default feature of most smartphones and  they start to become actually useful enough for people to go through the  trouble to scan them, they will remain a gee-whiz feature nobody uses.&#8221;</p>
<p><strong>Chris Arkenberg:  So again, this gets back to competing standards and who gets access to the phone stack, the bundle. Who gets the OEM dealâ€¦?</strong></p>
<p><strong>Tish Shute:</strong> Yes, the battles for the networks on the Handset Plains are pretty important for AR!<br />
[laughter] I think Layar have made some smart moves on The Handset Plains.</p>
<p>And there are a lot of acquisitions of nearfield technology to look at.  If I remember rightly eBay bought the Red Laser tech from Occipital &#8211; now there&#8217;s an interesting company. Their panorama stuff rocks!</p>
<p><strong>Chris Arkenberg:  Right. Thereâ€™s a lot of nearfield stuff thatâ€™s supposed to hit all of the major mobile platforms in the next year or so.</strong></p>
<p><strong>I mean I think where this is heading, in my mind, is basically smart motes.  You know, little nearfield wide-range RFIDâ€™s that are the size of a small, tiny square that you could attach to just about anything and then program it to be a representative of your establishment or of an object, that then you can start to tag just about anything. I mean you canâ€™t rely on geo to do it, but if you have a Nearfield chip there that costs maybe like two cents to buy in bulk, and you can flash program it, then you can start to attach data to just about anything.</strong></p>
<p><strong>Tish Shute:</strong> Yes &#8216;cos some things still remain very difficult for near field image recognition technologies like Google Goggles.</p>
<p><strong>Chris Arkenberg:  Well, if your phone can interrogate for Nearfield devices, and it detects a chip in its near field, it can then interrogate that chip.  The chip may contain flash data on itself, or it may contain the local server in the establishment, or it may go to the cloud and get that data back.</strong></p>
<p><strong>Tish Shute:</strong> Yes there is movement from the top and open source hardware like Arduino has created an opportunity for all sorts of creativity with instrumented environments.  And the handheld sensors in our pockets &#8211; our smart phones &#8211; create a lot of opportunity for bottom-up innovation too.</p>
<p><strong>Chris Arkenberg:  I mean thatâ€™s my guess.  If you look at what IBM is doing with their Smarter Planet initiative, theyâ€™re partnering with a lot of municipalities, and obviously with a lot of businesses and their global supply chains.</strong></p>
<p><strong>But theyâ€™re basically working with municipalities and all these stakeholders to instrument their territory, their business, or their city, as it were. So theyâ€™re working to provide embedded sensors and the software necessary to read them out and run reports &amp; viz.  And presumably that software can extend to include some sort of mobile device to interrogate the sensors and read the data.</strong></p>
<p><strong>Thatâ€™s kind of a top-down approach of a very large global company working with top-down governance bodies to do this. Simultaneously you have the maker crowd experimenting with Arduino and such to build from the grassroots, the bottom up approach.</strong></p>
<p><strong>And that&#8217;s primarily gated by the amount of learning it takes to be able to program these devices, to be able to hack them.  Typically, the grassroots creators who make these devices don&#8217;t have the luxury of very large budgets to make things highly usable and WYSIWYG.</strong></p>
<p><strong>So the bottom up community is a sandbox to create tremendous amounts of innovation, because they are unconstrained by the very real financial needs of the top down innovators.  And so you get a lot of fascinating innovation, a very rich ecology from the bottom-up approach, but you donâ€™t get a lot of wide distribution.  But that does filter up to and inform the top down approach that has a lot more money to put into this stuff.  And it ultimately has to respond to the needs of the marketplace.</strong></p>
<p><strong>I mean if thereâ€™s an answer to the question of whether something like AR will succeed through the bottom-up grassroots approach or the top-down industry approach, I would say it would be both.  That handsets will be hacked to read the bottom up innovations of the maker community, and handsets will be preprogrammed to read the top down efforts of the IBMs of the world.</strong></p>
<p><strong>Tish Shute:</strong> Yes but i have to say it is very time-consuming hacking phones (I have just seen a few days suck up in this myself so that I could upgrade my G1 to try out the new ARWave client!).  I mean Android has obviously been the platform of choice because of openness but the business model of iPhone and its market share in the US sure make it important for developers.Â   Itâ€™s like you donâ€™t exist if you donâ€™t have an iphone app for what you are doing.</p>
<p><strong>Chris Arkenberg:  Yeah, and thatâ€™s the challenge, because at the end of the day developers prefer not to work for free and a solid, reliable mechanism to monetize their efforts becomes very appealing.</strong></p>
<p><strong>When I look at this map, the points of control map, itâ€™s really interesting to me, because what it says to me with respect to AR is each of these little regions that they have drawn out would be a great research project. So every single one of these should be instructive to AR.</strong></p>
<p><strong>In other words, we should be able to look at social networks, the land of search, or kingdom of ecommerce, and apply some very rigorous critical thinking to say, â€œHow would AR add to this engagement, this experience of gaming, or ecommerce, or content?â€</strong></p>
<p><strong>Looking at each of these individually and really meticulously saying, â€œOK, well yes, it can do this but how is that different from the current screen media experience, the current web experience that we have of all these types of things?â€  You know, how can augmented reality really add a new layer of value and experience to these? And I think that process would really trim a lot of the fat from the hopes and dreams of AR and anchor it down into some very pragmatic avenues for development.  And then you could start looking at, â€œWell, OK, what happens when we start combining these?â€ When we take gaming levels and plug that into the location basin, as you suggested.</strong></p>
<p><strong>Tish Shute: </strong> Some of the important platforms for AR donâ€™t appear to have spots on the map like Google Street View and other mapping technologies that hold out so much hope for AR, or am I missing something?</p>
<p><strong>Chris Arkenberg:  You mean on the map?</strong></p>
<p><strong>Tish Shute:</strong> Yes for the full vision of AR we need sensor integration, computer vision and cool mapping technologies to come together. Do you see where Google Maps and Google Street View&#8230; Where would they be?</p>
<p><strong>Chris Arkenberg:  Yeah, I mean itâ€™s certainly content, itâ€™s locationâ€¦</strong></p>
<p><strong>Are you familiar with Earthmine?</strong></p>
<p><strong>Tish Shute:</strong> Yes, yes I am, definitely.<a href="http://www.earthmine.com/index" target="_blank"> Earth Mine</a>, <a href="http://simplegeo.com/" target="_blank">Simple Geo</a>, Google Street View, user generated internet photo sets like  Flickr all of these could be very important to AR, potentially.</p>
<p><strong>Chris Arkenberg:  Well, and the interesting thing about Earthmine is that theyâ€™re effectively trying to do an extremely precise pixel to pixel location mapping.  So theyâ€™re taking pictures of cities just like Street View, except theyâ€™re using the Z axis to interrogate depth and then using very precise geolocation to attach a GPS signature to each pixel that theyâ€™re registering in their images. Effectively, you get a one-to-one data set between pixels and locations.  And so you can look at something like Google Street View, and if you point to the side of a building, in theory, it should know exactly where that is.</strong></p>
<p><strong>Theyâ€™re rolling this out with the idea of being able to tag augmented reality objects in layers directly to surfaces in the real world.  So thatâ€™s another approach to trying to get accurate registration and to try and create what are essentially mirror worlds. Then your Google Street View becomes a canvas for authoring the blended world, because if you plop a 3D object into Street View on your desktop, and then you go out to that location with your AR headset, youâ€™ll see that 3D object on the actual street.</strong></p>
<p><strong>Tish Shute:</strong> There was some experimental work with Google Earth as a platform for a kind of simulated AR but I suppose Google Earth doesnâ€™t figure in the battle for the network economy as it never got developed as a platform.</p>
<p><strong>Chris Arkenberg:  It hasnâ€™t tried to become a platform, to my  knowledge.  I mean I know some people are doing stuff with it, but as far as I know, Google owns it, they did it the best because they have the best maps, and thereâ€™s not a huge ecosystem of development thatâ€™s based around it other than content layers.</strong></p>
<p><strong>And my sense of everything else on the Points of Control map is theyâ€™re looking more at these sort of platform technologies thatâ€¦</strong></p>
<p><strong>Tish Shute:</strong> Yes, re platforms for growth for AR. Gaming consoles will probably emerge as a significant platform for AR this year.</p>
<p><strong>Chris Arkenberg:  There will be much more of a blended reality experience in the living room for sure, and with interactive billboards. Digital mirrors are another area.  So I mean if we kind of extend AR to include just blended reality in general, you know, this is moving into our culture through a number of different points. As you mentioned, it will be in the living room, it will be in our department stores where you can preview different outfits in their mirror. Weâ€™re already seeing these giant interactive digital billboards in Times Square and other areas.</strong></p>
<p><strong>Itâ€™s funny.  I mean for me, the sort of blended reality aside, the augmented reality, to me, is actually a very simple proposition in some respects.  When I look at this map, augmented reality is just an interface layer to this map in my mind, just as itâ€™s an interface layer to the cloud and itâ€™s an interface layer to the instrumented world. Itâ€™s a way to get information out of our devices and onto the world.</strong></p>
<p><strong>Tish Shute:</strong> The importance of leveraging existing platforms has become pretty clear but it is interesting Facebook definitely gave Zynga the opportunity but would Facebook be so big without Zynga&#8217;s social gaming boost?</p>
<p><strong>Chris Arkenberg:  I feel that Zynga has definitely helped its growthâ€¦But I think Zynga has benefited a lot more from Facebook than Facebook has from Zynga.</strong></p>
<p><strong>Tish Shute:</strong> Zynga certainly proved you  could build a profitable business on Facebookâ€™s API!</p>
<p><strong>Chris Arkenberg:  They did.  And they also really validated the Facebook ecosystem and the platform.  They really extended itâ€¦ Zynga benefited from the massive social affordances that Facebook had already architected and developed. They brought gaming directly into Facebook, and particularly, this emerging brand of lightweight social gaming that when you sit it on top of a massive global social network like Facebook, it suddenly lights up.</strong></p>
<p><strong>Tish Shute: </strong>AR pioneers should quite carefully go through this map. There is so much to think about here. Iâ€™m a kind of fanatic about  Streams of  Activity in AR.  Real time brokerages and their potential for AR is something I am fascinated by.  That is one reason I love the ARWave project.</p>
<p>Anselm Hook, to me, is one of the great thinkers in this area of real time brokerages &#8211; with his project Angel, and the work of <a href="http://www.ushahidi.com/" target="_blank">Ushahidi,</a> which is now the platform <a href="http://www.ugotrade.com/2010/09/17/urban-augmented-realities-and-social-augmentations-that-matter-interview-with-bruce-sterling-part-2/" target="_blank">for augmented foraging (see here)</a>.  Anselm is now working on AR at PARC which is exciting.</p>
<p><strong>Chris Arkenberg:  Well, there are some challenges working with data streams. Presentation and filtering I think is a big challenge with any sort of stream.  Because obviously, you have a lot of potential data to manage, to parse, and to make valuable and comprehensible. So I think this is bound very closely to being able to personalize experiences, or having very discreet valuable experiences.  Disaster relief, for example, I think is an interesting idea that ties into the Pachube type of work. Where, if you had the headset and you were a relief worker, and you had immediate lightweight, non-intrusive, heads up alpha channel overlay, waypoint markers showing you all of the disaster locations or points of need, AR becomes extremely valuable, because itâ€™s a primarily hands-free environment.  This is why the military stuff is so interesting.</strong></p>
<p><strong>Tish Shute:</strong> Ha!  We are running into the eye patch/shades/goggles/sexy specs thing again.  But filtering and making streams of activity relevant will be very interesting for AR.  Again that is why I love the Wave Federation Protocol work because of what they have built into their XMPP extensions.  You can have your real-time personal data streams, or community streams, or broadcast publicly &#8211; the permissions are built in.</p>
<p>And Thomas Wrobelâ€™s original vision of these layers and channels is only fully expressed if you have the eyewear.</p>
<p><strong>Chris Arkenberg:  Well, and it becomes redundant if itâ€™s on a mobile. To use a very basic example, Twitter, obviously thereâ€™s an app you can view those streams of activity on the camera stream. But you can view that real time data on the screen.  Why do you need to see it heads up?</strong></p>
<p><strong>The reason I really pay attention to what the military is investing in, one, because they have a ton of money, but also because they tend to represent the core bio survival needs of the speciesâ€¦So, when I look at computing, I see this very obvious trend of computers getting smaller and smaller and closer and closer to us because theyâ€™re so valuable to our success.  They give us so much valuable information for engaging our world on a moment by moment basis.  So, of course now we have these tiny little handheld devices that give us access to the global knowledge depositories of human history, because itâ€™s so useful to have that stuff right at hand.</strong></p>
<p><strong>The only impediment now is that it takes one of our hands, if not both of them, to access it.  So if you are in the natural world, which we are all always in the natural world, ultimately, you want your hands free in order to engage with the world on a physical level.</strong></p>
<p><strong>I see computation, or rather, our access to computation is just going to get thinner and thinner, and weâ€™ll very soon move into eyewear, and inevitably, weâ€™ll move into brain computer interface in some capacity.</strong></p>
<p><strong>So when youâ€™re the disaster worker, or a deployed soldier, or the extreme mountain biker, or the heli-skier, or just an adventurer, there are a lot of very practical reasons to have access to information on a heads-up plane. I see AR as being so profound and so valuable, but weâ€™re getting a glimpse of it in its infancy, and itâ€™s got a ways to go to be able to really contain what it is weâ€™re reaching for.</strong></p>
<p><strong>Tish Shute:</strong> I agree.</p>
<p><strong>Chris Arkenberg:  And thatâ€™s been a big criticism Iâ€™ve had with all the existing AR implementations that Iâ€™ve seen, is that the UI really needs a revolution.  Itâ€™s very heavy handed.  It is not dynamic, even though itâ€™s supposed to be.  It does not take advantage of transparencies.  It treats the screen like a screen.  It doesnâ€™t treat the screen like a window onto the real world. When youâ€™re looking on the real world, you donâ€™t want a lot of occlusion.  You want very soft-touch indicators of a data shadow behind something that you can then address and then have it call out the information thatâ€™s important to you.</strong></p>
<p>Tish Shute:  Now, thatâ€™s a very nice kind of image youâ€™ve conjured for me there.  Do you see that more could be done on the smartphone than is being done within that?  Or are we like waiting for the old ishades?</p>
<p><strong>Chris Arkenberg:  I think thereâ€™s definitely a lot of room for improvement on the smartphone UI.  Nobodyâ€™s really played around with it much. And again, I think thatâ€™s in part that there hasnâ€™t been a really established platform with enough money to fund interesting UI work. We see it in some of the concept demos that float around every now and then.</strong></p>
<p><strong>I guess itâ€™s both a blessing and curse that Iâ€™m always five steps ahead of where Iâ€™m trying to get to.</strong></p>
<p><strong>Tish Shute:</strong> Yeah, I am familiar with that feeling!</p>
<p><strong>Chris Arkenberg:  So Iâ€™m always trying to reach for the vision even though itâ€™s a bit distant. I think thereâ€™s going to be a lot of development on the handsets.  But again, I think we need a lot of refinement.  We need a lot of real critical analysis of why this is a good thing.</strong></p>
<p><strong>To get back to the original point of Raimoâ€™s comment, it struck me.  And I knew it, but I just had set it aside as gimmickry. But heâ€™s right.  Content is a huge driver for this.  Just stuff thatâ€™s engaging, and fun, and cool, and shows off the technology so they can get enough money to make it through whatever Trough of Disappointment may be waiting.</strong></p>
<p><strong>Tish Shute:</strong> Yeah, donâ€™t underestimate the Planes of Content!Â  They are a great place to get interest and money to keep AR technology  moving on, right?</p>
<p><strong>Chris Arkenberg:  Yeah, yeah.  Because, you know, thereâ€™s a lot of freedom there.  And you can piggyback on all the rest of the content thatâ€™s out there and jump on memes and marketing objectives, etc&#8230;</strong></p>
<p><strong>And there&#8217;s a lot of stuff&#8230;I&#8217;m blanking on some of the names, but some of these historical recreations of city streets.  There&#8217;s a street in London where they overlaid historical photos in a really compelling experience. [Museum of London - http://www.museumoflondon.org.uk/] Again, I&#8217;m completely forgetting the attributions, but those are the type of things that can really be pursued on the existing platforms.  There is stuff that&#8217;s really compelling and really cool.</strong></p>
<p><strong>I heard of another interesting use case &#8211; and I should say that I can&#8217;t find attributions to this anywhere on the web and I may be paraphrasing or mis-representing the actual work, but I think the concept is worth exploring anyway. But the idea was that you could take the locations of border checkpoints and conflict sites in Palestine and Israel and visually overlay them on an AR layer in San Francisco.  And it would do some sort of transposition where you could virtually view these things in San Francisco with the same locational mapping superimposed. So you could see where the checkpoints were.  You could see where the wall was.  You could see where suicide bombings were and where there had been conflicts.</strong> <strong>[I cannot find any citations for this!]</strong></p>
<p><strong>Tish Shute: </strong> But with an AR view?  But why would you use an AR view if you  are in San Francisco, then?</p>
<p><strong>Chris Arkenberg:  Because it superimposes two realities, translating the Gaza conflict into San Francisco as you are walking around. You can interrogate the world. Thereâ€™s a discoverability aspect where youâ€™re using the headset to reveal things, or the handset rather, to reveal things that you could not see otherwise in your city. It was done as an art piece, but as a provocative, obviously political art piece.</strong></p>
<p><strong>Tish Shute: </strong>Very interesting.  Iâ€™d love to see that. Because thatâ€™s interesting to get away from this idea that you actually have to sort of have this one to one relationship between the data and the world is kinda nice, isnâ€™t it?  Well, not one to one, but a very literalâ€¦getting away from that literalness is kind of good.</p>
<p><strong>Chris Arkenberg:  And thatâ€™s a possibility of virtual reality and augmented reality merging, that maybe virtual reality is actually going to do best by coming out of the box and writing itself over our reality, so that as you are walking around, you are no longer seeing San Francisco, but you are seeing part of Everquest or World of Warcraft.</strong></p>
<p><strong>Tish Shute: </strong> Well this is where Bruce Sterling gets to that point he made in <a href="http://augmentedrealityevent.com/2010/06/06/are-2010-keynote-by-bruce-sterling-build-a-big-pie/" target="_blank">his keynote for are2010</a>, that if we actually have viable AR eyewear, then you get the gothic stepsister of AR, VR rising from the grave!&#160; He asks whether the very charm of augmented reality is, in fact, that it adds rather than subtracts from your engagement with the world and that getting sucked back into the black hole of VR might not be so great.</p>
<p><strong>Chris Arkenberg:  And then you get all sorts of interesting challenges to social cohesion if you have a lot of different people experiencing very different worlds, effectively.  That if there is no real consensual reality and a majority of your local populace is, in fact, experiencing very different and unique versions of the world, what does that do to social cohesion?  How does that reinforce tribalism, for example, when only you and certain others get to opt in to a particular layer view of the world?</strong></p>
<p><strong>Tish Shute:</strong> Yes Jamais Cascio wrote an interesting piece on that issue on AR and social cohesion a while back.</p>
<p>An eye patch is a more logical vision than the goggles in many ways but I suppose the loss is stereo vision?</p>
<p><strong>Chris Arkenberg:  And actually, there were developments in military helicopter technology many years ago that used a single pane square of glass over the eye mounted to the helmets of pilots.  And then they drew various bits of heads-up information on it. So that ensures that youâ€™re having a real strong engagement with the real world, which, obviously, when youâ€™re a helicopter pilot is quite important.  But you still have access to the data layer of  the invisible world.</strong></p>
<p><strong>Tish Shute:</strong> I just went to <a href="http://www.cloudera.com/company/press-center/hadoop-world-nyc/" target="_blank">Hadoop World</a> and I have to say, I was awestruck about how big thatâ€™s got.  I mean <a href="http://hadoop.apache.org/" target="_blank">Hadoop</a> has gone from like zero to huge in just a few years.  I mean itâ€™s just like now everyone has the power of the Google big table at their fingertips.</p>
<p>Whatâ€™s the play for AR in the land of search?</p>
<p>I could imagine Hadoop being very powerful tool for AR analytics?</p>
<p>Have you got any thoughts on the land of search and AR? Of course visual search is proceeding at a fast pace and there is a lot of promise for integrations with AR in the future but the latency for visual search is still pretty high?</p>
<p><strong>Chris Arkenberg:  In the near term, not a lot.  In the medium term, thereâ€™s a larger trend towards virtual agents that you can program or teach to keep watch over things for you as an effort to scale down the data overload.  So search is something thatâ€™s going to become more personalized and more active.  Thereâ€™s a movement to make it so people can essentially deputize these agents to be always searching for them; to be out there looking for the things that they have told these agents are important to them.</strong></p>
<p><strong>So active search for AR I think presents some challenges, obviously because you need to do text input, typically, or voice input.  Voice input, I think, is much more achievable than text input for AR.  But I can certainly imagine an AR layer that is being serviced by these agents that we have roaming around the web for us reconciling their visual view of the world with our personalizations. AR apps are contextually aware so it knows that if youâ€™re downtown, itâ€™s not going to be giving you a ton of information about Software as a Service infrastructure, or what have you.  But that, instead, itâ€™s going to be handing you little tidbits about a particular clothing brand youâ€™ve opted in to follow and information about  music venues &amp; schedules, for example.  Or perhaps youâ€™ll be on the lookout for other users that have opted in to publicly tag themselves as a member of this or that affinity.</strong></p>
<p><strong>I keep coming back to this idea of AR as really just a simple visualization layer that all of these other technologies can potentially feed into.  So in that sense, search becomes a passive thing that AR is just simply presenting to you in a heads-up, hands-free, or potentially hands-free environment.</strong></p>
<p><strong>Tish Shute:</strong> Yes, the big challenge is the stepping stones to that point! Small steps that keep interest going into developing the underlying technology (and not just in research labs!) that will bring us that interface.Â  We have seen some movement already with Qualcomm.</p>
<p><strong>Chris Arkenberg:</strong> And there are bandwidth issues as well, as we can see with the Google Goggles, which is a great idea of visual search.  But you have to take a picture and send it to the cloud and wait for your results.  Itâ€™s not a real-time dynamic interrogation of the world.</p>
<p><strong>Tish Shute:</strong> Yes we are really only at the very beginning of  AR being ready for prime time.. it would be interesting to ask AR developers how many of them use AR on a daily basis.</p>
<p><strong>Chris Arkenberg:  I think a lot of us, weâ€™re just informed by the sci-fi myths and fascinated with the potential now thatâ€™s itâ€™s starting to become real. But I think we all kinda get that itâ€™s still extraordinarily young.  I mean the web is extraordinarily young. And AR is itself far younger in a lot of ways in its implementations.</strong></p>
<p><strong>Everybody has a lot of excitement about all of the great potentials that are being unleashed by this great wave of the Internet and the web and ubiquitous mobile computing.  So thatâ€™s why, you know, you look at that map and we talk about AR and you canâ€™t talk about any of the stuff without talking about all of it, in a lot of ways, particularly with something like AR where itâ€™s so ultimately agnostic and could be completely pervasive across all of these layers.</strong></p>
<p><strong>So my fascination is with the future, and I measure our progress towards it by the young nascent offerings from the platform players and the developers. And yeah, a lot of it is&#8230;it&#8217;s akin to getting that first triangle on the screen in 3D.  You know, when the renderer finally works and you get a triangle on the screen, and you go, &#8220;Oh my God, it renders.&#8221;  And then you can start to really build polygons and build objects, and start doing boolean operations, and get light and rendering in there, and textures, and on, and on, and on.<br />
So I&#8217;m fascinated by the Layars and the Metaio&#8217;s&#8230;<br />
[laughter]</strong></p>
<p><strong>Tish Shute:</strong> Yes and hats off to all the players in the emerging industry, Layar, Metaio, Ogmento, Total Immersion, and all the others who are finding clever ways to bring fun aspects of AR into the mainstream, and fuel interest to take the technology to the next level.</p>
<p><strong>Chris Arkenberg:  Absolutely.  And the hype cycle is very valuable.  It has really helped launch the AR industry.  Itâ€™s brought a lot of eyes, and itâ€™s brought a lot of money into the industry.  And itâ€™s forcing people like us to have these conversations to understand how to refine its growth and really focus on the potential in all these different venues, whether itâ€™s trying to save lives, or better understand your city, or have really compelling entertainment experiences.</strong></p>
<p><strong>Everybodyâ€™s excited, and everybodyâ€™s sharing, and everybodyâ€™s trying to move it forward in a way thatâ€™s the most productive.</strong></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2010/10/27/platforms-for-growth-and-points-of-control-for-augmented-reality-talking-with-chris-arkenberg/feed/</wfw:commentRss>
		<slash:comments>3</slash:comments>
		</item>
		<item>
		<title>Visual Search, Augmented Reality and a Social Commons for the Physical World Platform: Interview with Anselm Hook</title>
		<link>http://www.ugotrade.com/2010/01/17/visual-search-augmented-reality-and-a-social-commons-for-the-physical-world-platform-interview-with-anselm-hook/</link>
		<comments>http://www.ugotrade.com/2010/01/17/visual-search-augmented-reality-and-a-social-commons-for-the-physical-world-platform-interview-with-anselm-hook/#comments</comments>
		<pubDate>Sun, 17 Jan 2010 17:05:01 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Artificial general Intelligence]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[online privacy]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[privacy and online identity]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[virtual communities]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[Anselm Hook]]></category>
		<category><![CDATA[AR Commons]]></category>
		<category><![CDATA[AR Consortium]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[ardevcamp]]></category>
		<category><![CDATA[are2010]]></category>
		<category><![CDATA[ARNY Meetup]]></category>
		<category><![CDATA[ARWave]]></category>
		<category><![CDATA[ARWave Wiki]]></category>
		<category><![CDATA[augmented reality conference]]></category>
		<category><![CDATA[augmented reality event]]></category>
		<category><![CDATA[augmented reality goggles]]></category>
		<category><![CDATA[augmented reality social commons]]></category>
		<category><![CDATA[brightkite]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[Davide Carnivale]]></category>
		<category><![CDATA[distributed AR]]></category>
		<category><![CDATA[distributed augmented reality]]></category>
		<category><![CDATA[federated search]]></category>
		<category><![CDATA[FourSquare]]></category>
		<category><![CDATA[Games Alfresco]]></category>
		<category><![CDATA[google goggles]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[gowalla]]></category>
		<category><![CDATA[graffitigeo]]></category>
		<category><![CDATA[hacking maps]]></category>
		<category><![CDATA[Head Map manifesto]]></category>
		<category><![CDATA[imageDNS]]></category>
		<category><![CDATA[imagemarks]]></category>
		<category><![CDATA[imagewiki]]></category>
		<category><![CDATA[location based services]]></category>
		<category><![CDATA[Map Kiberia]]></category>
		<category><![CDATA[Mikel Maron]]></category>
		<category><![CDATA[mobile internet]]></category>
		<category><![CDATA[mobile social]]></category>
		<category><![CDATA[mobile social interaction utility]]></category>
		<category><![CDATA[Muku]]></category>
		<category><![CDATA[neo-viridian]]></category>
		<category><![CDATA[Nokia's ImageSpace]]></category>
		<category><![CDATA[Ogmento]]></category>
		<category><![CDATA[open distributed AR]]></category>
		<category><![CDATA[OpenGeo]]></category>
		<category><![CDATA[paige saez]]></category>
		<category><![CDATA[photo-based positioning systems]]></category>
		<category><![CDATA[physical world platform]]></category>
		<category><![CDATA[placemarks]]></category>
		<category><![CDATA[Planetwork]]></category>
		<category><![CDATA[Platial]]></category>
		<category><![CDATA[point and find]]></category>
		<category><![CDATA[proximity based social networks]]></category>
		<category><![CDATA[snaptell]]></category>
		<category><![CDATA[social cartography]]></category>
		<category><![CDATA[social commons]]></category>
		<category><![CDATA[social search]]></category>
		<category><![CDATA[SpinnyGlobe]]></category>
		<category><![CDATA[Thomas Wrobel]]></category>
		<category><![CDATA[Tonchidot]]></category>
		<category><![CDATA[trust filters]]></category>
		<category><![CDATA[Viridian]]></category>
		<category><![CDATA[viridiandesign]]></category>
		<category><![CDATA[visual search]]></category>
		<category><![CDATA[Wave]]></category>
		<category><![CDATA[Wave Federation Protocol]]></category>
		<category><![CDATA[WhereCamp]]></category>
		<category><![CDATA[whurley]]></category>
		<category><![CDATA[yelp]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=5050</guid>
		<description><![CDATA[Visual search is heating up, and with it a key stage of turning the physical world into a platform is underway as images become hyperlinks to the world in applications like Google Goggles, Point and Find, and SnapTell &#8211; see this post by Katie Boehret.Â  And while there may be no truly game changing augmented [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/anselmhook.jpg"><img class="alignnone size-medium wp-image-5051" title="anselmhook" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/anselmhook-300x225.jpg" alt="anselmhook" width="300" height="225" /></a></p>
<p>Visual search is heating up, and with it a key stage of turning the physical world into a platform is underway as images become hyperlinks to the world in applications like <a href="http://www.google.com/mobile/goggles/#dc=gh0gg" target="_blank">Google Goggles</a>, <a href="http://pointandfind.nokia.com/" target="_blank">Point and Find</a>, and <a href="http://www.snaptell.com/" target="_blank">SnapTell</a> &#8211; <a href="http://solution.allthingsd.com/20100112/in-search-of-images-worth-1000-results/" target="_blank">see this post by Katie Boehret</a>.Â   And while there may be no truly game changing augmented reality goggles for a while, make no mistake, key aspects of our augmented view, factors that will have a lot to do with what we will actually see when an augmented vision of the world is a commonplace, are already in the works.Â  And, as Anselm Hook (pic above <a href="http://www.flickr.com/photos/caseorganic/2994952828/" target="_blank">from @caseorganic&#8217;s flickr</a>) notes:</p>
<p><strong>&#8220;There is a real risk of our augmented reality world being owned by interests which are not our own. There is a real question of when you hold up that AR goggle, what are you going to see?&#8221;</strong></p>
<p>Cooperating services, e.g., Google Earth, Maps, Streetview, Google Goggles, and leader in local search like Yelp (<a href="http://www.huffingtonpost.com/ramon-nuez/google-is-getting-ready-f_b_426493.html" target="_blank">see here</a>) would have an enormous ability to filter and control a mobile, social, context aware view of the physical world, and Google themselves see an ethical quandary.</p>
<p><strong> &#8220;A Google spokesperson says this app has the ability to use facial recognition with Goggles, but hasnâ€™t launched this feature because it hasnâ€™t been built into an app that would provide real value for users. The spokesperson also cites â€œsome important transparency and consumer-choice issues we need to think throughâ€ </strong><strong> (quote from Wall Street Journal Column</strong><a href="http://solution.allthingsd.com/20100112/in-search-of-images-worth-1000-results/" target="_blank"> by Katie Boehret)</a>.</p>
<p><a href="http://www.hook.org/" target="_blank">Anselm Hook</a> and <a href="http://paigesaez.org/" target="_blank">Paige Saez</a>, with great prescience, have been advocating a social commons for the placemarks and imagemarks to our physical world platform through a number of pioneering projects, including <a href="http://imagewiki.org/" target="_blank">imagewiki</a>.Â Â  I have interviewed both Anselm and Paige (upcoming) in depth, recently.Â  My talk with Anselm was nearly three hours long!Â  So I am publishing the transcript in two parts.</p>
<p>Understanding what it means to have a social commons forÂ  our physical world platform, and augmented reality, are key questions for all of us to think about, but especially important for those of us involved in the emerging industry of augmented reality.</p>
<p>Anselm <a href="http://blog.makerlab.org/2009/11/augmentia-redux/">notes</a> :</p>
<p><strong>â€œThe placemarks and imagemarks in our reality are about to undergo that same politicization and ownership that already affects DNS and content. Creative Commons, Electronic Frontier Foundation and other organizations try to protect our social commons. When an image becomes a kind of hyperlink â€“ thereâ€™s really a question of what it will resolve to. Will your heads up display of McDonalds show tasty treats at low prices or will it show alternative nearby places where you can get a local, organic, healthy meal quickly? Clearly thereâ€™s about to be a huge ownership battle for the emerging imageDNSâ€</strong></p>
<p>The mobile internet is moving beyond the internet in your pocket phase of mobility with mobile, social, proximity-based, context aware networks like <a href="http://www.foursquare.com/">FourSquare</a>, <a href="http://gowalla.com/" target="_blank">Gowalla</a>, <a href="http://brightkite.com/" target="_blank">Brightkite</a> and <a href="http://www.geograffiti.com/">GraffitiGeo</a> (see <a href="http://smartdatacollective.com/Home/23811">Smart Data Collective</a>) likely, soon, to start to take precedence over other forms of social network.</p>
<p>Regardless of the timeline for true augmented reality &#8211; 3D images &amp; graphics tightly registered to the physical world,Â  proximity-based social networking and real time search are already taking us into a hyper-local mode and the realm of augmented reality which is <strong><strong>&#8220;inherently about who you are, where you are, what you are doing, and what is around you&#8221; </strong></strong>(<a href="http://curiousraven.squarespace.com/" target="_blank">Robert Rice</a> &#8211; see <a href="http://www.ugotrade.com/2009/01/17/is-it-%E2%80%9Comg-finally%E2%80%9D-for-augmented-reality-interview-with-robert-rice/" target="_blank">here</a>).<strong><strong> </strong></strong>The ground is being prepared for augmented reality now.<strong><strong><br />
</strong></strong></p>
<p>If you have been reading Ugotrade, you will know I have been actively involved in developingÂ  an open, distributed AR platform/mobile social interaction utility for geolocated data based on the Wave Federation Protocol &#8211; AR Wave a.k.a Muku &#8211; &#8220;crest of a wave&#8221; (see my posts <a href="http://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/" target="_blank">here</a>, <a href="http://www.ugotrade.com/2009/12/04/ar-wave-project-an-introduction-and-faq-by-thomas-wrobel/" target="_blank">here</a> and <a href="http://www.ugotrade.com/2009/10/13/ar-wave-layers-and-channels-of-social-augmented-experiences/" target="_blank">here</a> for more on this project, and the <a href="http://arwave.wiki.zoho.com/HomePage.html" target="_blank">AR Wave Wiki</a> here).Â  Federation is, I believe, one vital aspect to developing a social commons for augmented reality and the physical world platform.</p>
<p>Also, a bit of news, I am co-chairing the upcoming <a title="Augmented Reality Event (are2010) Opens Call For Speakers" href="http://augmentedrealityevent.com/2010/01/17/augmented-reality-event-2010-opens-call-for-speakers/">Augmented Reality Event (are2010)</a> with <a href="http://gamesalfresco.com/about/" target="_blank">Ori Inbar</a> of <a href="http://gamesalfresco.com/" target="_blank">Games Alfresco</a> and <a href="http://ogmento.com/" target="_blank">Ogmento</a>, <a href="http://whurley.com/" target="_blank">whurley</a>.Â  Sean Lowery, <a href="http://www.innotechconference.com/pdx/Details/other.php" target="_blank">Prospera</a>, is the event organizer, and <a title="Augmented Reality Event (are2010) Opens Call For Speakers" href="http://augmentedrealityevent.com/2010/01/17/augmented-reality-event-2010-opens-call-for-speakers/">are2010</a> has the support of the <a href="http://www.arconsortium.org/" target="_blank">AR Consortium</a>. Â  The <a title="Augmented Reality Event (are2010) Opens Call For Speakers" href="http://augmentedrealityevent.com/2010/01/17/augmented-reality-event-2010-opens-call-for-speakers/">are2010</a> web site is live and there is an <a title="Augmented Reality Event (are2010) Opens Call For Speakers" href="http://augmentedrealityevent.com/2010/01/17/augmented-reality-event-2010-opens-call-for-speakers/">Open Call For Speakers</a>.Â   You can submit your proposals and demos for one of the three tracks, business, technology, or production <a href="http://augmentedrealityevent.com/speakers/call-for-proposals/" target="_blank">on the web site here</a>.</p>
<p><a href="http://augmentedrealityevent.com/" target="_blank"><img class="alignnone size-medium wp-image-5101" title="are2010" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/are20101-300x60.png" alt="are2010" width="300" height="60" /></a></p>
<p><a href="http://www.wired.com/beyond_the_beyond/" target="_blank">Bruce Sterling</a> &#8220;prophet&#8221; ofÂ  augmented reality and more, &#8220;will deliver the most anticipated <a href="http://augmentedrealityevent.com/speakers/" target="_blank">Augmented Reality keynote</a> of the year.&#8221;</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/bruces-brasspost.jpg"><img class="alignnone size-medium wp-image-5105" title="bruces-brasspost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/bruces-brasspost-300x225.jpg" alt="bruces-brasspost" width="300" height="225" /></a></p>
<p>It didn&#8217;t surprise me when Anselm mentioned that Bruce Sterling was a key influence for his work on the geospatial web and augmented reality.Â  Anselm explained:</p>
<p><strong>&#8220;Iâ€™d seen <a href="http://www.viridiandesign.org/notes/151-175/00155_planetwork_speech.html" target="_blank">a talk by Bruce Sterling</a> at an event called Planetwork [May, 2000]. And that event was, for me, a turning point where I decided to focus full time on exactly what I cared about instead of doing things that were kind of similar to what I cared about.</strong> <strong>So, his influences is a pretty significant one to me at that exact moment.&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/dhj5mk2g_490gcp7q6fn_b.png"><img title="dhj5mk2g_490gcp7q6fn_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/dhj5mk2g_490gcp7q6fn_b-300x80.png" alt="dhj5mk2g_490gcp7q6fn_b" width="300" height="80" /></a></p>
<p>For more see <a id="q2or" title="viridiandesign.org" href="http://www.viridiandesign.org/About.htm">viridiandesign.org</a> -Â  seems it is time for a &#8220;Neo-Viridian,&#8221;  revival!</p>
<p>This <a href="http://www.wired.com/beyond_the_beyond/2009/05/spime-watch-pachube-feeds/" target="_blank">post by Bruce Sterling on Pachube Feeds</a>, and Thomas Wrobel&#8217;s <a href="http://www.ugotrade.com/2009/08/19/everything-everywhere-thomas-wrobels-proposal-for-an-open-augmented-reality-network/" target="_blank">prototype design for open distributed augmented reality on IRC</a>, were key inspirations for me when I began thinking about the potential of Google Wave Federation protocol for augmented reality.Â  I had been exploring <a href="http://www.pachube.com/" target="_blank">Pachube</a> and deeply interested in <a href="http://www.ugotrade.com/2009/01/28/pachube-patching-the-planet-interview-with-usman-haque/" target="_blank">the vision of Usman Haque</a>, but I had a real <a href="http://www.ugotrade.com/2009/06/02/location-becomes-oxygen-at-where-20-wherecamp/" target="_blank">aha moment</a> when I read this :</p>
<p><strong>â€œ(((Extra credit for eager ubicomp hackers: combine this [pachube feeds] with Googlewave, then describe it in microsyntax. Hello, 2015!)))â€</strong></p>
<p>I think the AR Wave group will earn the extra credit and more very soon!Â  <a href="http://need2revolt.wordpress.com/about/" target="_blank">Davide Carnovale, need2revolt</a>, and <a href="http://www.lostagain.nl/" target="_blank">Thomas Wrobel</a><strong> </strong>have been leading the coding charge, and there will be a very early AR Wave demo soon, perhaps as soon as the <a href="http://www.meetup.com/arny-Augmented-Reality-New-York/" target="_blank">Feb 16th ARNY Meetup</a>.Â  <strong><br />
</strong></p>
<p>Open access to the creation of view that will eventually find its way into AR goggles, will depend not only on the power ofÂ  an open distributed platform for collaboration like the AR Wave project.Â  Our augmented reality view will be constructed through complex &#8220;hybrid tracking and sensor fusion techniques&#8221; (Jarell Pair), cooperating cloud data services, powerful search and computer vision algorithms, and apps that learn by context accumulation will drive our augmented experiences, and at the moment, these kind of resources, at least at scale, are for the most part in private hands.</p>
<p>In the interview below, Anselm discusses how trust filters, and <span id="zuat" title="Click to view full content">being able to publicly permission your searches so that other people can respond and so that people can reach out to you, and the democratization of data in general, are even more of a concern </span>with augmented reality and hyper local search<span id="zuat" title="Click to view full content">.</span> The task of understanding what it means to have a social commons for the outernet remains an open and pressing question.</p>
<p>Anselm explains (see full interview below):</p>
<p><strong><span id="e18n" title="Click to view full content">&#8220;as we move towards a physical internet where there&#8217;s no clicking and there&#8217;s no interface and the computer&#8217;s just telling you what it thinks you&#8217;re looking at, translating, you know, an image of a billboard to the name of the rock star who&#8217;s on that billboard, or translating the list of ingredients on a can of soup to the source outlets where it thinks that, those ingredients came from. When you have that kind of automated mediation, the question of trust definitely arises.</span></strong></p>
<p><strong><span id="e18n" title="Click to view full content"> And we haven&#8217;t seen the Clay Shirkys or the Larry Lessigs of the world start to talk about this yet.Â  Although I suspect that in the next four or five years that the zero click interface will become the primary interface, that we&#8217;ll have&#8230;we&#8217;ll come to assume that what we see with the extra enhanced data we get projected onto our view is the truth. Yet, at the same time, there is just no structure or mechanism even being considered for a democratic ownership of it.&#8221;</span></strong></p>
<h3>Augmented Reality will emerge through sensor fusion techniques &amp; cooperating cloud services</h3>
<p>In 2010, sensor fusion techniques, computer vision technology in conjunction with GPS and compass data will create data linking that can enable the kind of augmented reality that has been the stuff of imagination for nearly four decades (see <a href="http://laboratory4.com/2010/01/the-reality-of-augmented-reality/" target="_blank">Jarrell Pair&#8217;s post).</a></p>
<p>Putting stuff in the world in 3D is of course key to the original vision of augmented reality, and one of its biggest challenges.Â  Augmented reality is going to be implicated in a real time mapping of the world at an unprecedented scale and granularity.Â  We have barely an inkling of the implications of this now.</p>
<p>Anselm and Paige have been working in the heart of the social cartography movement for nearly a decade.Â  The vision and experience of this community is vital to understanding how augmented reality and the world as a physical platform can evolve into something that benefits people and allows them &#8220;to have a better understanding of the opportunities around them.&#8221;</p>
<p>We have been hacking maps for millenia â€“Â  â€œfrom conceptual story mapping, to colloquial mapping in European development and the cartographic renaissance created by the global voyages and rediscovery of Ptolemyâ€™s mapsâ€ (<a href="http://highearthorbit.com/" target="_blank">Andrew Turner</a>).Â  And, recently, initiatives on a public-provided GIS, like <a href="http://opengeo.org/" target="_blank">OpenGeo</a>, have led the way toward more open, interoperable, geospatial data.</p>
<p>Mapping takes on a new and crucial role in augmented reality.&#160; <a href="http://www.slashgear.com/nokia-image-space-adds-augmented-reality-for-s60-3067185/" target="_blank">Nokia&#8217;s ImageSpace</a> is beginning to do what many thought Microsoft would do with Photosynth two years ago.</p>
<p>And, if we see these kind of projects developed into a &#8220;photo-based positioning systems&#8221; -Â  &#8220;3d models of the environment to cover every possible angle, and then software that can work out in reverse based on a picture precisely where you are and where your facing&#8221; (Thomas Wrobel), we would find augmented reality leap forward over night.</p>
<p>It is time to take very seriously the vast opportunities and potential pitfalls of an augmented world.</p>
<p><strong><span id="vix9" title="Click to view full content">&#8220;when you are mediating the translation layer between the image and the data, then there is an opportunity for you to control it, and that opportunity is hard to resist. It is hard to choose not to own that opportunity. It is an advertising opportunity. It is a revenue opportunity. It is a chance to send a message and a tone. </span></strong></p>
<p><strong><span id="vix9" title="Click to view full content">I know that Google and companies like that are keenly aware of the kinds of roles they don&#8217;t want to hold, but it is sometimes seductive to think about them. And I am afraid that we, as a community, need to assert an ownership, kind of a commons, over how computers will translate what they see to information that we perceive.&#8221;</span></strong></p>
<p>There are some initiatives emerging. <a href="http://www.tonchidot.com/" target="_blank">Tonchidot</a> (who <a href="http://www.techcrunch.com/2009/12/08/tonchidot-sekai-camera-funding/" target="_blank">closed on $4 million of VC for augmented reality </a>last December) has helped create the <a href="http://translate.google.com/translate?client=tmpg&amp;hl=en&amp;u=http%3A%2F%2Fwww.arcommons.org%2F&amp;langpair=ja%7Cen" target="_blank">AR Commons</a> in Japan. <a href="http://www.tonchidot.com/corporate-profile.html" target="_blank">CFO of Tonchidot</a>, <a href="http://www.linkedin.com/ppl/webprofile?action=vmi&amp;id=499984&amp;pvs=pp&amp;authToken=r8TF&amp;authType=name&amp;trk=ppro_viewmore&amp;lnk=vw_pprofile" target="_blank">Ken Inoue</a> explained in <a href="http://www.ugotrade.com/2009/09/17/tonchidot-taking-augmented-reality-beyond-lab-science-with-fearless-creativity-and-business-savvy/" target="_blank">an interview with me in September 2009</a>.</p>
<p>&#8220;<strong>We feel that public data, such as landmarks, government facilities, and public transport should be shared. We see an AR world where people can readily and easily access information by just seeing &#8211; quick, easy, and efficient. And because of this ease and intuitiveness, children, the elderly and handicapped will surely benefit. AR could help create a safer society. Warnings, alerts, and safety information could save lives and avoid disasters. These are what we, and <a href="http://translate.google.com/translate?client=tmpg&amp;hl=en&amp;u=http%3A%2F%2Fwww.arcommons.org%2F&amp;langpair=ja%7Cen" target="_blank">AR Commons</a> would like to tackle in the not so distant future.&#8221;</strong></p>
<p>But<strong> </strong>the task of building a social commons for the physical world platform has only just begun.<strong><br />
</strong></p>
<p><strong><span title="Click to view full content"><br />
</span></strong></p>
<h3>Interview with Anselm Hook</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/anselm31.jpg"><img class="alignnone size-medium wp-image-5085" title="anselm3" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/anselm31-300x225.jpg" alt="anselm3" width="300" height="225" /></a></p>
<p><em>photo from <a href="http://www.flickr.com/photos/anselmhook/3832691280/in/set-72157621946362509/" target="_blank">Anselm&#8217;s Flickr stream here</a></em></p>
<p><span id="u2mq" title="Click to view full content"><strong>Tish Shute:</strong> We <a href="http://www.ugotrade.com/2009/06/02/location-becomes-oxygen-at-where-20-wherecamp/" target="_blank">first met last year </a></span><span id="zjlm" title="Click to view full content"><a href="http://www.ugotrade.com/2009/06/02/location-becomes-oxygen-at-where-20-wherecamp/" target="_blank">at Wherecamp</a>. </span><span id="suh4" title="Click to view full content">The start of 2009 was I think</span><span id="e_r5" title="Click to view full content"> the &#8220;OMG finally&#8221; moment for augmented reality and</span><span id="wo16" title="Click to view full content"> in less than a year, at least in proto forms, AR is breaking into the mainstream now! You are one of the founding visionaries/philosophers/hackers of the geo web and you have been thinking about geo web and AR for a long time &#8211; <a href="http://hook.org/headmap" target="_blank">all the way back to the legendary Head Map Manifesto</a>, and before. Most recently you led the way in the very successful <a href="http://www.ardevcamp.org/wiki/index.php?title=Main_Page" target="_blank">ARDevCamp</a> in Mountain View. </span><span id="kn-y" title="Click to view full content"> Could you start by telling me a little bit about the history of your pioneering work with geolocated data?</span></p>
<p><strong>Anselm Hook: </strong>I am a long time Geo fanatic. I&#8217;m really interested in social cartography and what some people call public-provided GIS, that&#8217;s some language that people use. Anyway, my personal interest, when I talk to people who are non-technical (and it&#8217;s been a long term interest in the way I phrase it) is that I want to help people see through walls. So, the goal is very simple. I want people to have a better understanding of opportunities around them, the landscape around them. I always get frustrated when people make bad decisions because of a lack of information, especially when it&#8217;s related to their community and related to their environment. But, plainly put, I really just want &#8220;to help people see through walls&#8221;. It&#8217;s a very simple goal.</p>
<p><strong>Tish Shute:</strong> I know you worked on <a href="http://platial.com/" target="_blank">Platial</a>, which is really one of my favorite social mapping applications. It really broke new ground. What was the history of that? How did you get involved with Platial?</p>
<p><strong>Anselm Hook:</strong> That&#8217;s an interesting question. It actually started at around 2000 when I saw Bruce Sterling talk. I had been writing video games for many years, and I was quite good at it, and I enjoyed it. But, the reasons I was doing it diverged from why the industry was doing it. I was making video games because I like to make shared spaces for my friends to play in and to share experience. I really enjoyed making shared environments. I worked on <a id="jrn-" title="BBS's" href="http://en.wikipedia.org/wiki/Bulletin_board_system">BBS&#8217;s</a> and my friends and I were always making these collaborative shared environments.</p>
<p>Once the video game industry kind of started to take off, I started to do high performance, 3D interactive video games and making compelling shared spaces, and it was a lot of fun. But, the frustration for me was that there was a huge industry growing around it and became very commercial. Although it paid well, it started to diverge from my values which were more centered around community environments, and shared understanding.</p>
<p><strong>Tish Shute:</strong> Yes very rapidly, the big games kind of devolved from the social aspects and became more and more into single player really, didn&#8217;t they?</p>
<p><strong>Anselm Hook:</strong> It was the way, actually, because even though often you were in a many player world, you weren&#8217;t collaborating, everything else became just a target. I liked the idea of deep collaboration that recalls the kind of playful space you see in IRC, or in the real world, where people are solving real world problems.</p>
<p>And I grew up in the Rockies, and I always had a lot of access to the outside. So, I saw shared spaces and collaboration as a way to protect our environment. [ To step back ] I think people used different metrics <span id="gozb" title="Click to view full content">for measuring their choices in the world and many people have a value system centered around minimization of harm: making sure that the people are not hurt. But, my value system is different. I personally believe that protecting the planet is more important: to maximize biodiversity. I feel like protecting people around me comes from protecting the ecosystems they live in.</span></p>
<p><strong>Tish Shute:</strong> That&#8217;s interesting, isn&#8217;t it, because the history of Keyhole was really that, wasn&#8217;t it. Keyhole later became Google Earth, but I mean it began out of a project to look at what was going on in the ecosystem over Africa at that time, didn&#8217;t it?<br />
<strong><br />
Anselm Hook:</strong> Yes, in fact many people&#8217;s projects are stemming from an environmental concern. <a id="zxy9" title="Mikel Maron&#8217;s" href="http://brainoff.com/weblog/">Mikel Maron&#8217;s</a> works for example &#8211; he&#8217;s doing <a id="euvm" title="Map Kibera" href="http://mapkibera.org/">Map Kibera</a>, and he also worked on OpenStreetMap.</p>
<p><strong>Tish Shute:</strong> Map Kibera &#8211; that is the new project?</p>
<p><strong>Anselm Hook:</strong> Oh, yes his project is called <a id="r7ie" title="Map Kibera" href="http://mapkibera.org/">Map Kibera</a>. He&#8217;s mapping a city in Africa.<br />
[For more see <a id="ngn." title="Map Kibera's YouTube Channel" href="http://www.youtube.com/user/mapkibera">Map Kibera&#8217;s YouTube Channel</a> &#8211; <a id="amqx" title="photo below" href="http://www.flickr.com/photos/junipermarie/4098163856/" target="_blank">photo below</a> from <a href="http://www.flickr.com/photos/junipermarie/">ricajimarie</a> ]</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/dhj5mk2g_487qfcv76ft_b.jpg"><img class="alignnone size-medium wp-image-5052" title="dhj5mk2g_487qfcv76ft_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/dhj5mk2g_487qfcv76ft_b-300x199.jpg" alt="dhj5mk2g_487qfcv76ft_b" width="300" height="199" /></a></p>
<p><strong>Tish Shute:</strong> Right, great!</p>
<p><strong>Anselm Hook:</strong> When I started to look at GIS and mapping I started to meet people who had a very similar background. What happened to me is I kind of stepped away from games around the year 2000. I&#8217;d seen a talk by Bruce Sterling at an event called <a id="e8dn" title="PlaNetwork" href="http://www.conferencerecording.com/newevents/pla20.htm">PlaNetwork</a>. And that event was, for me, a turning point where I decided to focus full time on exactly what I cared about instead of doing things that were kind of similar to what I cared about. So, his influence is a pretty significant one to me at that exact moment.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/dhj5mk2g_490gcp7q6fn_b.png"><img class="alignnone size-medium wp-image-5053" title="dhj5mk2g_490gcp7q6fn_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/dhj5mk2g_490gcp7q6fn_b-300x80.png" alt="dhj5mk2g_490gcp7q6fn_b" width="300" height="80" /></a></p>
<p>[For more see <a id="q2or" title="viridiandesign.org" href="http://www.viridiandesign.org/About.htm">viridiandesign.org</a> &#8211; seems that it is time for a &#8220;Neo-Viridian&#8221; revival.]</p>
<p><strong>Tish Shute:</strong> It&#8217;s interesting because now your paths are crossing again with augmented reality. You are on the same wavelength again.</p>
<p><strong>Anselm Hook:</strong> It&#8217;s funny, actually, I&#8217;ve had a couple of brief overlaps in that way. Well, so in 2000 I<span id="mdsf" title="Click to view full content"> went to see this talk and I did a small project called &#8212; well, I called it <a id="bx3u" title="SpinnyGlobe" href="http://github.com/anselm/SpinnyGlobe">SpinnyGlobe</a>. What I did is I mapped protests from a number of websites onto a globe to show the level of community opposition to the pending war in Iraq. It was the first time there had been a protest before a war. So, it was very interesting to me. [ See <a href="http://hook.org/headmap" target="_blank">http://hook.org/headmap</a> ]<br />
<strong><br />
Tish Shute:</strong> That&#8217;s really fascinating. Do you have any pictures of that you could send me? </span></p>
<p><span id="r0h_" title="Click to view full content"><a href="http://www.flickr.com/photos/anselmhook/1747152617/sizes/m/in/set-72157602696188420/" target="_blank"><img class="alignnone size-medium wp-image-5054" title="dhj5mk2g_492ffct2df4_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/dhj5mk2g_492ffct2df4_b-300x225.jpg" alt="dhj5mk2g_492ffct2df4_b" width="300" height="225" /></a></span></p>
<p><span id="mdsf" title="Click to view full content">photo from <a id="j05v" title="anselm's flickrstream" href="http://www.flickr.com/photos/anselmhook/1747152617/sizes/m/in/set-72157602696188420/">anselm&#8217;s flickrstream</a></span></p>
<p><strong>Tish Shute:</strong> Yes, I&#8217;ll definitely look <a id="ua2l" title="SpinnyGlobe" href="http://github.com/anselm/SpinnyGlobe">SpinnyGlobe</a><span id="m0:j" title="Click to view full content"> up. It sounds very interesting. One of the aspects of your work on geo-located data projects like this and <a id="h.gx" title="Platial" href="http://platial.com/">Platial</a> is that you really started to develop this idea of a culture of place, about how people make place. This was the wake up call to me regarding the power of networks combined with geo-data. </span></p>
<p><span id="m0:j" title="Click to view full content">We are hoping to extend this idea into augmented reality with an open distributed platform for AR so that we can collaboratively map our worlds from the perspective of who we are, where we are, and what we are doing. I know you&#8217;ve just done some work recently in augmented reality. I know you put the code up already. </span></p>
<p><span id="m0:j" title="Click to view full content">By the way, I love the way you take your philosophy into the way you make code &#8211; the practice of making some code, trying some things out, making it all public and publishing your findings, you know, your comments on that experience. Perhaps you could recap sort of how you picked up recently on the state of play with augmented reality and what aspects you looked at, and what came out of that experience?</span></p>
<p><strong>Anselm Hook:</strong> So, it&#8217;s a very simple trajectory. Coming out of the work I had done, <a id="cs18" title="Platial" href="http://platial.com/">Platial</a>, among other projects, I started to just look at the hyper-local and I suddenly realized that even those services weren&#8217;t really speaking to living, and how to really see and solve local problems. What was missing was a sense of context.</p>
<p>The map doesn&#8217;t know how you&#8217;re feeling, it doesn&#8217;t know if you&#8217;re in a hurry, it doesn&#8217;t know what you want, it&#8217;s very static. Even the web maps are very static. And augmented reality for me I started to recognize as a combination of &#8212; well &#8212; it&#8217;s probably a collision of many forces, many forces that we&#8217;re all a part of. We&#8217;ve also started to realize that the real-time web is really important, it&#8217;s part of<span id="bja1" title="Click to view full content"> what AR is about.</span></p>
<p>We have all started to realize that the context is important. You know, your personal disposition, your needs, if you want to be interrupted or not. That is the kind of thing that the ubiquitous computing crowd has talked about. We started to recognize that there are sensors everywhere, and the ambient sensing communities talked about that. So what is funny for me about augmented reality is I started realizing it is just a collision of many other trends into something bigger.</p>
<p>Everything else we thought was a separate thing is actually just part of this thing. Even things like Google Maps or mapping systems we think are so great are really just kind of almost an aspect of a hyper-local view. You actually don&#8217;t really care what is happening 10 blocks away or 100 blocks away. If you could satisfy those same interests and needs within a single block, one block away, you would probably be really happy. You really just want to satisfy needs and interests, find ways to contribute, or get yourself fed, or whatever it is you want. And AR seemed to be the playground to really explore the human condition.</p>
<p><strong>Tish Shute:</strong> Anyway, I think one of the things that has been very amazing this year is we have good mediating devices that, for the first time, give us compasses, GPS, and accelerometers. But one of the missing pieces with AR at the moment is [tracking, mapping, and registration] &#8211; the kind of things colloquial mappings of the world could be of great help with.</p>
<p>We have seen mapping coming out of the Flickr data, e.g., the University of Washington put the maps together from the geo-tagged Flickr photos. Now if we could have that linked up with AR, then we have the kind of mapping we need to kind of really hook the geo-data onto the world in a way that goes beyond&#8230;you know, what compass and GPS can really deliver is pretty minimal at the moment.</p>
<p><strong>Anselm Hook</strong>: There is a real risk of our augmented reality world being owned by interests which are not our own. There is a real question of when you hold up that AR goggle, what are you going to see? Are you going to see corporate advertising? Are you going to see your friends&#8217; comments or criticisms? Is it going to be an Iran or a democracy, right? It is unclear.</p>
<p><span id="vix9" title="Click to view full content">Right now there are some disturbing trends I have noticed. I am a big fan of Google Goggles. I think it is a great project. But when you are mediating the translation layer between the image and the data, then there is an opportunity for you to control it, and that opportunity is hard to resist. It is hard to choose not to own that opportunity. It is an advertising opportunity. It is a revenue opportunity. It is a chance to send a message and a tone. </span></p>
<p><span id="vix9" title="Click to view full content">I know that Google and companies like that are keenly aware of the kinds of roles they don&#8217;t want to hold, but it is sometimes seductive to think about them. And I am afraid that we, as a community, need to assert an ownership, kind of a commons, over how computers will translate what they see to information that we perceive.</span></p>
<p><strong>Tish Shute:</strong> Yes. And this is how we met, again, recently [over the project to create an open, distributed platform for AR using the Wave Federation Protocol]&#8230;</p>
<p><span id="e18n" title="Click to view full content">This is something I feel really deeply: basically we need the physical internet to be as open as the end-to-end internet has been. Or more so, actually, because on the end-to-end internet the trend has been to walled gardens. Basically Facebook became an enormous walled garden which, I think, despite our predictions about them, [walled gardens] are really the social experience on the web. It&#8217;s very much in walled gardens still, and I really feel that with the physical internet, we need to make great efforts for it not just to be a series of small pockets of privately funded walled gardens.</span></p>
<p>There needs to be some kind of communications infrastructure that keeps it open so that was when I got interested in looking at the Wave Federation Protocol because it was a real time, you know, an open real time protocol that could possibly be a basis for that. But I think the point you&#8217;ve talked to just now, the mapping of the world and who has the &#8220;goggles&#8221;, i.e., the image data, image databases, that make the world meaningful is really, that&#8217;s still a, it&#8217;s still a BIG question [i.e. who controls the view?].</p>
<p>When I saw <a id="ewxn" title="ImageWiki" href="http://imagewiki.org/">ImageWiki</a>, [I realized] that is a piece that is vital for augmented reality. We need to have a huge social effort to be involved in this, linking in and creating the physical internet, in creating the image hyperlinks that will make that meaningful.</p>
<p><span title="Click to view full content"><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/dhj5mk2g_493fv23rg33_b.png"><img class="alignnone size-medium wp-image-5055" title="dhj5mk2g_493fv23rg33_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/01/dhj5mk2g_493fv23rg33_b-300x219.png" alt="dhj5mk2g_493fv23rg33_b" width="300" height="219" /></a></span></p>
<p><span id="e18n" title="Click to view full content"><strong>Anselm Hook:</strong> I think that&#8217;s a great point. The search interface, the kind of Internet that we&#8217;re used to, the way we talk to the network now, is fundamentally open end to end. Yes, you can have your oligarchies inside of it, as we see with Facebook, but you can always start your own venture up and you can do a search on something, and you can find that, that website and you can join it or you can put up your own webpage and people can find it. </span></p>
<p><span id="e18n" title="Click to view full content">The translation layer, the idea of text search and the ability to discovery power and the serendipity and the openness of that discovery, it&#8217;s pretty open right now. We do have some serious boundaries of language, which is one of the reasons I was working at the <a id="xg:8" title="Meadan.org" href="http://www.imug.org/events/past2007.htm#meadan">Meedan.org</a> [hybrid distributed, natural language translation] for a couple of years, trying to bridge that issue.</span></p>
<p>But here, as we move towards a physical internet where there&#8217;s no clicking and there&#8217;s no interface and the computer&#8217;s just telling you what it thinks you&#8217;re looking at, translating, you know, an image of a billboard to the name of the rock star who&#8217;s on that billboard, or translating the list of ingredients on a can of soup to the source outlets where it thinks that, those ingredients came from. When you have that kind of automated mediation, the question of trust definitely arises.</p>
<p>And we haven&#8217;t seen the Clay Shirkys or the Larry Lessigs of the world start to talk about this yet. Although I suspect that in the next four or five years that the zero click interface will become the primary interface, that we&#8217;ll have&#8230;we&#8217;ll come to assume that what we see with the extra enhanced data we get projected onto our view is the truth. Yet, at the same time, there is just no structure or mechanism even being considered for a democratic ownership of it.</p>
<p><span id="fv3x" title="Click to view full content">We have with DNS, for example, the idea that you can register the domain name and people can search for it, and find it, and go to it. There&#8217;s no such thing as an Image DNS, or an image translation to DNS right now. What does it mean when everything is just &#8220;magic&#8221;, when there&#8217;s no way for you to be a part of the conversation, where you&#8217;re just a consumer of what people tell you, or of what one company right now, tells you, is reality? That&#8217;s a real concern.<br />
<strong><br />
Tish Shute: </strong>This, to me, is the most important question at the moment. I mean, it&#8217;s the big one and it&#8217;s the place to put energy if you love the Internet [and what it can now become], right? You&#8217;ve got to put a lot of energy into this because this [a democratized view of the physical world as a platform] won&#8217;t just happen, because there&#8217;s a lot of momentum already for it to be heavily privatized, partly because, one reason is, some of the computer vision algorithms that, say, make sense of things like the geotagged photographs are not open. I mean, for example, the beautiful maps that have been made from the University of Washington [from Flickr geotagged photo sets], that isn&#8217;t in the public domain.</span></p>
<p><strong>Anselm Hook:</strong> Right. Tish, and in fact you&#8217;re referring [with the maps from the Flickr photos] to ordinary maps and the fact we&#8217;ve already seen that maps lie, we&#8217;ve already seen how much maps are reflecting a certain truth that becomes the normative truth. Google maps reflects roads, because this is roads and cars, right? Only recently have they thought about buses and walking. So the normative view that people assume is the reality, is showing off you know Starbucks, and roads, and cars, that becomes the default, those prejudices are just assumed, you know, the truth. But they&#8217;re not the truth at all.</p>
<p>I was talking to a friend of mine in Montreal, [Renee Sieber], and she said that their Indian portage routes are a bridge across land and water, they don&#8217;t think of a piece of land and a piece of water as being different things, they think of them as one thing: a route. It&#8217;s already a different kind of language we can&#8217;t even reflect it.</p>
<p>So not only is there this kind of formal, anthropological lie, in a sense, but there&#8217;s this way that we deceive ourselves because of our own prejudices.</p>
<p><strong>Tish Shute:</strong> Yes I agree and that&#8217;s why I think when I saw some of the things you had written on the ImageWiki point clearly to the need to create a social commons. We need a social commons for the real-time physical internet, we need it for the image hyperlinks that make sense of that.</p>
<p>And it&#8217;s a complicated thing in a sense, though, because we don&#8217;t actually have a good distributed infrastructure for AR yet, and I found exploring AR Wave, that at last we have the suggestion of an open, federated protocol for real-time communication &#8211; the wave federation protocol. [Real time communications is a very important part of AR]. It isn&#8217;t an actuality yet where lots of people are able to use it, set up their own servers, and there&#8217;s not a standard all the way through [there is not a standard for how data is sent between the client and the server].</p>
<p>But Wave Federation Protocol does make possible truly distributed social AR. When I saw ImageWiki, I started thinking that we could bring ImageWiki together with the social collaborative power of distributed AR. This really would be the basis of creating a social commons for augmented reality and the physical world as a platform &#8211; the <span id="np6x" title="Click to view full content">start of a bottom-up approach with deep social collaboration on how we create augmented reality colloquial maps that can inform a hyper-local view of the world.</span></p>
<p><strong>Anselm Hook:</strong> Yes. When Paige Saez, John Wiseman, and myself, and a few other folks&#8230; You know, Benjamin Foote, Marlin Pohlmann, and a couple other people started to play with this, we quickly found that&#8230; We started to realize, &#8220;Oh, this kind of thing will be at least as popular as IRC. There will be at least as many people doing this as chatting in little virtual spaces. There&#8217;ll be at least as many people decorating the world with augmented reality markup, and maybe using the real world as a kind of barcode for translating what you&#8217;re looking at into an artifact, a digital artifact.&#8221;</p>
<p>And<span id="csy2" title="Click to view full content"> that the size of that space was going to be huge, basically. Maybe not quite as commodifiable as Twitter, but certainly very energetic.</span></p>
<p>Many of the projects we did were just kind of looking at these kinds of issues sort of from an artistic, technical, and political point of view. We weren&#8217;t so much posing complete solutions, but simply using a praxis to explore the idea with an implementation, as a foundation for this discussion. So I think we sort of opened that can of worms for sure.</p>
<p><strong>Tish Shute:</strong> Did you actually set up ImageWiki to be working as a location based app yet?</p>
<p><strong>Anselm Hook:</strong> It is a location based app. It collects your longitude, latitude, and the image and stores it. And then it uses that as a way to translate that image to anything else. It could be a piece of text or a URL.<br />
<strong><br />
Tish Shute:</strong> So there is a smartphone app, but you didn&#8217;t take it as far as an AR app yet?</p>
<p><strong>Anselm Hook:</strong> No. We didn&#8217;t do a heads-up view. There are apps on the iPhone store that do that, but they don&#8217;t do the brute force image recognition that we were using. We used a third party off the shelf algorithm that we found on Wikipedia and downloaded the source code, and threw it on the server. And John Wiseman in LA wrote the scalable database backend so that we could scale the actual&#8230;<br />
<strong><br />
Tish Shute:</strong> So how did you set the iPhone app up to work?</p>
<p><strong>Anselm Hook</strong>: The iPhone side was very simple. You take a picture of something and it tells you what it is. That is all it did. We would take the location, but the client side, the iPhone side, just rendered, returned to you&#8230;It said, &#8220;Someone said that this picture of a barking dog is an advertisement for a local band.&#8221;</p>
<p><strong>Tish Shute:</strong> Right. So basically it was geo-tagged?</p>
<p><strong>Anselm Hook:</strong> Yes. We are just collecting the geo information. Actually, there were a whole lot of technical challenges. The whole idea of ImageWiki is actually kind of beyond our technical ability for a small team like us. It really does take a team, a group like Google, to do this kind of thing in a scalable way.<br />
<strong><br />
Tish Shute:</strong> Why is that?</p>
<p><strong>Anselm Hook:</strong> There are two sides. There is the curating of the images. I think that is the job of groups like us &#8211; open source groups who can curate images <span id="vxty" title="Click to view full content">that are owned by the community. And then the searching side, the algorithm side, where you are actually matching the fingerprint of one image to images in your database, that takes a much more&#8230;that is much more industrial. We did both sides, but ours is not a scalable solution. It is mostly&#8230;proving that it could be done was important.<br />
</span><br />
<span id="a3ou" title="Click to view full content"><strong>Tish Shute: </strong>In terms of hooking Imagewiki up to the collaborative possibilities of AR Wave wouldn&#8217;t federation pose some interesting possibilities for scaling search algorithms and all that?</span></p>
<p><span id="vp27" title="Click to view full content"><strong>Anselm Hook:</strong> Yes. And what is funny also, incidentally, is that, nevertheless, we did look for some financial support for it, but we couldn&#8217;t&#8230;we just didn&#8217;t find the investors to scale it. Now, other companies like SnapTell took a shot at it. And they have an app in the iPhone store where you can point at a beer bottle and get back the name of the beer bottle.</span></p>
<p>The classic example everyone uses is a book. Amazon has all the image jackets of all their books. You can point SnapTell at almost any book and get back links to buy that at Amazon, the price of the book, and user comments on the book. So they are treating Amazon as the canonical voice of the book, for better or worse. That is the state of the art so far, up until Google Goggles came out a little while ago, which actually blows it out of the water. But, that is where we are now.</p>
<p><strong>Tish Shute: </strong>Right. But the point you raise about how something like Amazon becomes the canonical voice of what a book is, right, this is the whole point, isn&#8217;t it?</p>
<p><strong>Anselm Hook:</strong> Is Amazon truth? It&#8217;s not bad. Jeff Bezos seems like a nice guy, but, you know.</p>
<p><strong>Tish Shute:</strong> And this is the point of having these open infrastructures for this. And this should be obvious in a way, but it comes back to the thing about what made the Internet great was the fact that even though as you note, you get an oligarchy like Facebook, but people always could just go off and do something else, right? Because the fundamental infrastructure was basically open and designed to be available for everyone. And many people have championed that and fought for it hard [to maintain this openness] haven&#8217;t they? They have devoted their lives to keeping it that way, even if the oligarchies have done their thing.<br />
<strong><br />
Anselm Hook:</strong> Yes. There are really some things that are underneath all of this that haven&#8217;t been solved yet.</p>
<p>One is that the trust in social networks has not been built yet, so we can&#8217;t do peer based recommendations very well. We can&#8217;t filter noise by peers. Twitter kind of is moving there, but I don&#8217;t just want to listen to my Twitter friends. I want to listen to my friends of friends. If I am getting truth from somebody, I want to get that truth from people my friends say that they trust.</p>
<p>Then the second problem is that there is a search business. My friend Ed Bice, who owns <a id="lir5" title="Meedan" href="http://beta.meedan.net/">Meedan</a>, always says that a search itself, a search request, is an opportunity to make&#8230;is a publishing moment. It is an opportunity to say what you think. In the real world, if you are just hanging out with humans and you look somewhere, other people might look at your gaze and they might look at what you are looking at. Your gaze itself is a public act.</p>
<p>Gaze is a soft act, but it is one that is visible. With Google, the gaze<span id="zuat" title="Click to view full content"> of four billion people is invisible. We don&#8217;t know what people are looking at, there is no opportunity to participate. Let me give you a real example.&#160; I have taken an image of something, the bust of a figure or a statue.&#160; Why can&#8217;t the museum in Cairo look at my request and tell me oh yeah that is Tutankhamen, or that is Nefertiti right? Why can&#8217;t they have a chance to participate in the search and respond to me?</span></p>
<p><span id="zuat" title="Click to view full content"> Right now the only person that responds is Google when I do a search. We need to invert the search pyramid and open up search, so that search is a democratic act, so that you can publicly permission your searches so that other people can respond and so that people can reach out to you, not just you having to do a dialogue. </span></p>
<p><span id="zuat" title="Click to view full content">The common example of this&#8230; and we see this everywhere: I am looking for a slice of pizza right now, I am hungry, I want some pizza. I have to ask Google, look, find twelve websites, call twelve phone numbers, and talk to each of the twelve stores, and ask them are they open late, is the food organic, is the food any good, do my friends like it.</span></p>
<p>Whereas what I should be able to do is just say it&#8217;s a search moment and I am interested in pizza. If those pizza places meet my criteria, like you know my friends like them and they are organic, they are open, then that pizza place can call me. I have the money, why should I do the search? So the whole business of search, the whole structure of search is predicated around a revenue model, but it&#8217;s a really short-sighted revenue model, it&#8217;s not a brokerage.</p>
<p>Search isn&#8217;t search, search is hand waving.&#160; These should be moments for us to have a discourse. So the problem we are seeing in AR with communication of the right information is actually underneath AR, at the level of the whole infrastructure.</p>
<p>Search needs to be inverted, trust filters need to be built. We need to democratically own our data institutions.&#160; We don&#8217;t right now.&#160; That will be more of a concern, especially with AR.</p>
<p><strong>Tish Shute: </strong>Yes, especially with AR, which is why I got all excited about federation.&#160; Do you think federation has the potential, an opportunity to create [the new infrastructure you describe]?</p>
<p><strong>Anselm Hook:</strong> Absolutely, it&#8217;s absolutely what we must do. It is much harder to do. It is absolutely critical.</p>
<p><span id="lwzk" title="Click to view full content"><strong>Tish Shute:</strong> And why is it much harder to do? Could you explain that?</span></p>
<p><strong>Anselm Hook:</strong> Well, it&#8217;s very easy for a bunch of hackers to build a service that you log into and fetch some data, it&#8217;s a single thing. They don&#8217;t have to talk to anybody, they can use their own protocols, they can hack it, it&#8217;s a big black box, behind the scenes. There&#8217;s someone running back and forth in a giant Chinese room delivering manuscripts and scrolls to you. Whatever is behind the black box, you don&#8217;t care, it just works.&#160; But when you federate, you need to actually publish and have standards, and then you&#8217;re talking about semantics, everyone starts getting really excited and waving some hands. It becomes a disaster. It&#8217;s, at least, another order of magnitude more difficult than DIY, build it yourself.</p>
<p><strong>Tish Shute:</strong> So, in terms of what Google Wave has done with their approach to federation, what do you think have been their achievements and what do you think are their obstacles? What do you think are the failings of the Wave? Because it&#8217;s the first big public major player backed approach to something federated, isn&#8217;t it? In real time.</p>
<p><strong>Anselm Hook:</strong> Yes. I think the most important non-federated service on the planet today is Twitter.&#160; <a id="uhg3" title="Identi.ca" href="http://identi.ca/group/identica">Identi.ca</a> isn&#8217;t getting any traction with respect to Twitter. [ Even though ] Identi.ca is a federated version of Twitter and is very good. [ Identica is now <a id="w05j" title="Status.net" href="http://status.net/">Status.net</a> ] . So, we see already there that small players aren&#8217;t being competitive. Then look at other services like IRC. IRC is the secret backbone of the Net. All the open source projects, all the teams, all the people that work on open source projects are all on IRC. It&#8217;s the only way they get anything done.</p>
<p>With Google Wave, and the protocols underneath Google Wave, we see an attempt to build a similar kind of real time, but distributed protocol. I think it&#8217;s the right direction. I think, people should pick up the offering and make their own servers. I think that protocol is really great, I think the fact that it is compressed, it&#8217;s high performance, <span id="md2h" title="Click to view full content">it is small, real-time blobs of data flying around, all exactly the way it should be done. It is getting close to this kind of rewrite of the Internet that people keep talking about, because, you know, the net protocols are so bad, it is starting to treat the idea of intermittent exchanges being more transitory, volatile, and not heavy.</span></p>
<p><strong>&#8230;.to be continued.&#160; Part 2 coming soon!<br />
</strong></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2010/01/17/visual-search-augmented-reality-and-a-social-commons-for-the-physical-world-platform-interview-with-anselm-hook/feed/</wfw:commentRss>
		<slash:comments>17</slash:comments>
		</item>
		<item>
		<title>Augmented Reality DevCamp NYC: The Big ARNY &#8211; A Collaborative AR Game Project Modeled After Swarm of Angels</title>
		<link>http://www.ugotrade.com/2009/12/06/augmented-reality-devcamp-nyc-the-big-arny-a-collaborative-ar-game-project-modeled-after-swarm-of-angels/</link>
		<comments>http://www.ugotrade.com/2009/12/06/augmented-reality-devcamp-nyc-the-big-arny-a-collaborative-ar-game-project-modeled-after-swarm-of-angels/#comments</comments>
		<pubDate>Sun, 06 Dec 2009 13:20:50 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Android]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[AR DevCamp]]></category>
		<category><![CDATA[AR DevCampNYC]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[ardevcamp]]></category>
		<category><![CDATA[ARDevCampNYC]]></category>
		<category><![CDATA[aygmented reality]]></category>
		<category><![CDATA[Goblin XNA]]></category>
		<category><![CDATA[Google Wave Protocol for AR]]></category>
		<category><![CDATA[marker based augmented reality]]></category>
		<category><![CDATA[markerless augmented reality]]></category>
		<category><![CDATA[Microvision]]></category>
		<category><![CDATA[mobile social augmented reality]]></category>
		<category><![CDATA[mobile social games]]></category>
		<category><![CDATA[open augmented reality]]></category>
		<category><![CDATA[open distributed augmented reality]]></category>
		<category><![CDATA[semantic web and augmented reality]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[social augmented reality]]></category>
		<category><![CDATA[The Big ARNY]]></category>
		<category><![CDATA[The Big ARNY Game]]></category>
		<category><![CDATA[The Open Planning Project]]></category>
		<category><![CDATA[TOPP]]></category>
		<category><![CDATA[TOPPLABS]]></category>
		<category><![CDATA[Wave enabled AR]]></category>
		<category><![CDATA[Wave Federation Protocol]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4996</guid>
		<description><![CDATA[First an incredibly big thank you to The Open Planning Project office (TOPP) &#8211; @TOPPLabs, and Sophia Parafina, @spara,Â  for organizing, hosting,Â  sponsoring and providing so much inspiration for this event. There is an incomplete list of attendees below, and there were about 70 people at one point watching the Ustream (thank you Dimitri Darras [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><object classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="400" height="300" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0"><param name="flashvars" value="offsite=true&amp;lang=en-us&amp;page_show_url=%2Fphotos%2Fugotrade%2Fsets%2F72157622945515856%2Fshow%2F&amp;page_show_back_url=%2Fphotos%2Fugotrade%2Fsets%2F72157622945515856%2F&amp;set_id=72157622945515856&amp;jump_to=" /><param name="allowFullScreen" value="true" /><param name="src" value="http://www.flickr.com/apps/slideshow/show.swf?v=71649" /><param name="allowfullscreen" value="true" /><embed type="application/x-shockwave-flash" width="400" height="300" src="http://www.flickr.com/apps/slideshow/show.swf?v=71649" allowfullscreen="true" flashvars="offsite=true&amp;lang=en-us&amp;page_show_url=%2Fphotos%2Fugotrade%2Fsets%2F72157622945515856%2Fshow%2F&amp;page_show_back_url=%2Fphotos%2Fugotrade%2Fsets%2F72157622945515856%2F&amp;set_id=72157622945515856&amp;jump_to="></embed></object></p>
<p>First an incredibly big thank you to <a title="http://openplans.org/contact/" rel="nofollow" href="http://openplans.org/contact/">The Open Planning Project office (TOPP)</a> &#8211; <a href="http://twitter.com/TOPPLabs" target="_blank">@TOPPLabs,</a> and Sophia Parafina, <a href="http://twitter.com/spara" target="_blank">@spara</a>,Â  for organizing, hosting,Â  sponsoring and providing so much inspiration for this event.</p>
<p>There is an incomplete list of attendees below, and there were about 70 people at one point watching the Ustream (thank you <a href="../../tridarras.com/#http://www.dimitridarras.com/images/dd_work.jpg" target="_blank">Dimitri Darras</a> and friend &#8211; sorry I missed getting your card!) for setting this up.</p>
<p>There were at least ten or more people participating in a live skype conference moderated by Sophia with great skill.</p>
<p>I am sorry I didn&#8217;t get everyone&#8217;s contact info.&#160; But please feel free to add your name into the comments of this post if I have missed you out.</p>
<p>After a gearheady morning, we spent the afternoon and evening brain storming the &#8220;The Big ARNY&#8221; &#8211; &#8220;a collaborative game development project modeled after a <a href="http://aswarmofangels.com/" target="_blank">Swarm of Angels</a>.&#8221;</p>
<p>Some of the morning tech discussion highlights included:</p>
<p>*skype presentations on <a href="http://arwave.wiki.zoho.com/HomePage.html" target="_blank">AR Wave</a> from <a href="http://www.lostagain.nl/" target="_blank">Thomas Wrobel</a>, <a href="http://www.joelamantia.com/" target="_blank">Joe Lamantia, </a><a href="http://matthieupierce.com/" target="_blank">Matthieu Pierce</a>.</p>
<p>*the <a href="http://www.youtube.com/watch?v=h4HmYQPejFk">beginnings of an iphone client</a> from the <a href="http://code.google.com/p/pygowave-server/" target="_blank">PyGoWave</a> Crew.</p>
<p>*discussing <a href="http://www.microvision.com/wearable_displays/index.html" target="_blank">Microvision</a>, Augmented Reality eyewear &#8211; and trying out<a href="http://twitpic.com/s9zjt"> Nomad Unit</a> courtesy of <a href="http://augmentation.wordpress.com/" target="_blank">Noah Zerkin</a>, @NoaZark</p>
<p>*an awesome deep dive into the code of the <a href="http://www1.cs.columbia.edu/~ohan/" target="_blank">open Goblin XNA VR/AR platform</a> &#8211; courtesy of <a href="http://www.cs.columbia.edu/~ohan/" target="_blank">Ohan Oda</a> (pic below) <a href="http://www.ustream.tv/recorded/2719336" target="_blank">video of presentation here</a>.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/ohanodapost.jpg"><img class="alignnone size-medium wp-image-5016" title="ohanodapost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/ohanodapost-300x199.jpg" alt="ohanodapost" width="300" height="199" /></a></p>
<p>Ori Inbar<a href="http://gamesalfresco.com/2009/12/05/live-from-nyc-augmented-reality-dev-camp/" target="_blank"> live blogged the morning sessions on Games Alfresco. </a></p>
<p>But, during the afternoon, Ori presented, and we all got so caught up in the brainstorming of&#160; &#8220;The Big ARNY Game&#8221; that live blogging, skyping, and twittering ground to a near halt.&#160;&#160; The &#8220;meat space&#8221; (perhaps the slide show captures some of the incredible coolness of the location) was alive with brilliant ideas that were matched by an incredibly high level of technical input &#8211; see the AR DevCamp attendees list below.</p>
<p>During the game session we really had a master class in augmented reality tech. Â  <a href="http://www1.cs.columbia.edu/~feiner/" target="_blank">Steven Feiner&#8217;s</a> awesome discussion of markers really opened my mind to exploring markers in a new way.Â  And the geolocated data discussion with Sophia Parafina, <a href="http://www.maploser.com/?page_id=6" target="_blank">Kate Chapman,</a> <a href="http://phil.ashlock.us/" target="_blank">Philip Ashlock</a>,Â  and Steve Feiner at dinner was very interesting.Â  The opportunity to break out into smaller in depth discussions during the day was one of the valuable opportunities of AR DevCamp, so I can&#8217;t possibly mention them all.Â  But thank you everybody!</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/BigARNYpost.jpg"><img class="alignnone size-medium wp-image-5005" title="BigARNYpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/BigARNYpost-300x199.jpg" alt="BigARNYpost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/ardevcampnycpost.jpg"><img class="alignnone size-medium wp-image-5013" title="ardevcampnycpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/ardevcampnycpost-300x199.jpg" alt="ardevcampnycpost" width="300" height="199" /></a></p>
<p>We did have some fun with low tech AR too &#8211; courtesy of Thomas Wrobel &amp; Bertine van H&#246;vell, <strong><a href="http://www.lostagain.nl/" target="_blank">Lost Again</a></strong> (their business card is the coolest AR card I have seen to date).&#160; In the pic on the right, I try out their business card/AR overlay on @comogard as he presents.&#160; The lighting does not do the overlay justice in my photo (on right), but I think you get the idea at least.</p>
<p>Unfortunately we didn&#8217;t manage to hook up our afternoon live session with <a href="http://www.ardevcamp.org/wiki/index.php?title=Main_Page" target="_blank">The Mountain View AR DevCamp</a>, as we lost the streaming laptop.Â  But hopefully we will be able to catch up on each other&#8217;s activities with session notes on the<a href="http://www.ardevcamp.org/wiki/index.php?title=Main_Page" target="_blank"> AR DevCamp Wiki.</a> There is also a public wave,Â  <a href="https://wave.google.com/wave/#restored:wave:googlewave.com!w%252BTfPQziYJA" target="_blank">AR Dev Camp NYC Shared Notes</a>.</p>
<p><a href="http://www1.cs.columbia.edu/~swhite/" target="_blank">Sean White</a> set the afternoon off to a great start by collecting topics and organizing topics of interest on the board.Â  While we didn&#8217;t get time to cover everything, it was interesting how, by working on developing a collaborative game project, we had to tackle many of the topics suggested, and come up with workable approaches.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/Seanwhitenotespost.jpg"><img class="alignnone size-medium wp-image-5006" title="Seanwhitenotespost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/Seanwhitenotespost-300x199.jpg" alt="Seanwhitenotespost" width="300" height="199" /></a></p>
<p>Next weekend <a title="http://openplans.org/contact/" rel="nofollow" href="http://openplans.org/contact/">TOPP</a> <span><span> will host the <a href="http://opennyforum.org/2009/11/open-ny-summit-09/" target="_blank">OpenNY Summit &amp; Codeathon</a> on Dec 11 &amp; 12, an event &#8220;</span></span><span>produced by open government practitioners and volunteers.&#8221;Â  This would be another great place to explore some of the citizen 2.0 mobile, social AR game ideas that came up at AR DevCampNYC.  In addition, Ori Inbar has started an <a href="http://www.meetup.com/ARNY-Augmented-Reality-New-York/">AR New York Meetup</a>.<br />
</span></span></p>
<p>Below is an incomplete list of AR DevCampNYC attendees.</p>
<p><strong>Sophia Parafina</strong>, OpenGeo, @spara, organizer<strong> </strong></p>
<p><strong>Marco Neumann</strong>, <a href="http://www.konallc.com/" target="_blank">KONA</a>, @neumarcx, interested in developing Semantic Web based Augmented Reality demos.<strong> </strong></p>
<p><strong>Tish Shute</strong>, <a title="http://www.ugotrade.com" rel="nofollow" href="../../">Web</a>,<a title="http://twitter.com/tishshute" rel="nofollow" href="http://twitter.com/tishshute">@tishshute</a>, Open Distributed AR, Google Wave Protocol for AR, Imagining the Future of the Outernet<strong> </strong></p>
<p><strong>Dimitri Darras</strong>, @dimitridarras, Visual designer, web developer, and virtual worlds content creator. Interested in multimodal input and AR/Virtual Worlds integration.<strong></strong></p>
<p><strong>Heidi Hysell</strong>, @heidihysell, Creative Technologist &amp; Software Engineer; Interested in the application of AR for entertainment technology for print, web and video.<strong></strong></p>
<p><strong>Joe Lamantia</strong>, Muku / ARWave, <a title="http://joelamantia.com" rel="nofollow" href="http://joelamantia.com/">@mojoe</a> Amsterdam, interested in creating open frameworks, social augmented experiences, emerging media &#8211; (attending via skype)<strong></strong></p>
<p><strong>Kate Chapman</strong>, Web Developer, FortiusOne, @wonderchook<strong></strong></p>
<p><strong>Matthieu Pierce</strong>, <a title="http://matthieupierce.com" rel="nofollow" href="http://matthieupierce.com/">itinerant poet</a>, @matthieupierce, Pittsburgh, PA.  Interested in <a title="AR Use Cases" href="http://www.ardevcamp.org/wiki/index.php?title=AR_Use_Cases">AR Use Cases</a> and observation. Attending via Skype.</p>
<p><strong>Ori Inbar</strong>, <a title="http://ogmento.com" rel="nofollow" href="http://ogmento.com/">ogmento</a> <a title="http://gamesalfresco.com" rel="nofollow" href="http://gamesalfresco.com/">games alfresco</a> Let&#8217;s get together to brainstorm on the &#8220;Big AR NY Game&#8221;: The first location-based, social, augmented reality game designed for New York by New Yorkers.<strong></strong></p>
<p><strong>Noah Zerkin</strong>, <a title="http://augmentation.wordpress.com" rel="nofollow" href="http://augmentation.wordpress.com/">[1]</a> &#8211; AR software and hardware interfaces; Exploring the idea of an AROS.<strong></strong></p>
<p><strong>Ohan Oda</strong>, <a title="http://www.cs.columbia.edu/~ohan" rel="nofollow" href="http://www.cs.columbia.edu/%7Eohan">webpage</a> &#8211; Columbia University; NYC<strong></strong></p>
<p><strong>Sean White</strong>, <a title="http://www.cs.columbia.edu/~swhite" rel="nofollow" href="http://www.cs.columbia.edu/%7Eswhite">webpage</a> &#8211; Augmented reality research at Columbia University and Smithsonian Institution.<strong></strong></p>
<p><strong>Steve Henderson</strong>, Columbia University, <a title="http://www.cs.columbia.edu/~henderso" rel="nofollow" href="http://www.cs.columbia.edu/%7Ehenderso">webpage</a>, <a title="http://twitter.com/stevehenderson" rel="nofollow" href="http://twitter.com/stevehenderson">@stevehenderson</a><strong></strong></p>
<p><strong>Omer Gunes</strong>, [<a title="http://www.cs.nyu.edu/~ofg201" rel="nofollow" href="http://www.cs.nyu.edu/%7Eofg201">[2]</a> webpage] &#8211; NLP, Speech Recognition, Mobile Software Development<strong></strong></p>
<p><strong>Steve Feiner</strong>, Computer Graphics and User Interfaces Lab, Dept. of Computer Science, Columbia University, <a title="http://www.cs.columbia.edu/~feiner" rel="nofollow" href="http://www.cs.columbia.edu/%7Efeiner">personal</a>, <a title="http://www.cs.columbia.edu/graphics/top.html" rel="nofollow" href="http://www.cs.columbia.edu/graphics/top.html">lab</a> &#8211; Augmented reality, mobile/wearable computing.<strong></strong></p>
<p><strong>Jon Russek</strong>, NYC, <a title="http://www.russek.org" rel="nofollow" href="http://www.russek.org/">website</a>, <a title="http://twitter.com/filmaddict" rel="nofollow" href="http://twitter.com/filmaddict">@filmaddict</a> &#8211; AR as applied to film/theater/art.<strong></strong></p>
<p><strong>Daniel Leslie</strong>, <a title="http://reflexionsdata.com" rel="nofollow" href="http://reflexionsdata.com/">Reflexions Data, LLC</a> <a title="http://twitter.com/dan_leslie" rel="nofollow" href="http://twitter.com/dan_leslie">@dan_leslie</a>, Principal at application consulting/development firm where we&#8217;re working on a mobile app for proximity-based real time social graph analysis.<strong></strong></p>
<p><strong>Donald Schwartz</strong>, NYC, <a title="http://twitter.com/Ishkahbibel" rel="nofollow" href="http://twitter.com/Ishkahbibel">@Ishkahbibel</a>virtual worlds, social media, technology writer</p>
<p><strong>David Oliver</strong>, <a title="http://olivercoady.com" rel="nofollow" href="http://olivercoady.com/">Oliver+Coady, Inc. NYC</a>, <a title="http://twitter.com/davidmoliver" rel="nofollow" href="http://twitter.com/davidmoliver">@davidmoliver</a> mobile strategy, mobile product definition, mobile development.</p>
<p><strong>Chris Grayson</strong>, NYC, Twitter: <a title="http://twitter.com/chrisgrayson" rel="nofollow" href="http://twitter.com/chrisgrayson">@chrisgrayson</a> | Blog: <a title="http://gigantico.squarespace.com" rel="nofollow" href="http://gigantico.squarespace.com/">GigantiCo</a> | Contributor: <a title="http://hplusmagazine.com" rel="nofollow" href="http://hplusmagazine.com/">H+ Magazine</a> | Web developer and marketing consultant &#8212; Interests: Future of commercial mobile AR / Outernet (GeoSearch &amp; OOH marketing convergence); Future AR Form Factors; AR/Virtual Worlds integration re: distance learning &amp; collaboration.</p>
<p><strong>Saul Devitt</strong>, NYC<strong></strong></p>
<p><strong>Bert Picot</strong>, NYC via Skype probably around 10:30 am for a few hours. Very interested in learning the value chain for AR applications and the development of applications for Festivals and live entertainment.</p>
<p><strong>MZ </strong>&#8211; startup to develop a platform to use semantic data to enable virtual worlds</p>
<p><strong>Jon Russek</strong> &#8211; film production + law + internet. Interested in AR as artistic medium for creativity</p>
<p><strong>Davide Byron</strong> â€“ developed the game <a href="http://www.youtube.com/watch?v=k2BK9VAk3RY" target="_blank">Spads and Fokkers</a> and <a href="http://spadsandfokkers.sourceforge.net/" target="_blank">code</a></p>
<p><strong>Philip Ashlock </strong><a href="http://twitter.com/philipashlock" target="_blank">@philipashlock</a>, The Open Planning Project</p>
<p><span><strong>Michael Keating</strong>, The Open Planning Project</span></p>
<p><strong>Yohan Baillot</strong>, <a title="http://twitter.com/yohanBaillot" rel="nofollow" href="http://twitter.com/yohanBaillot">@yohanBaillot</a> future of commercial mobile AR, emerging AR standards</p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/12/06/augmented-reality-devcamp-nyc-the-big-arny-a-collaborative-ar-game-project-modeled-after-swarm-of-angels/feed/</wfw:commentRss>
		<slash:comments>6</slash:comments>
		</item>
	</channel>
</rss>
